Compare commits


10 Commits

Author | SHA1 | Message | Date
Angela P Wen | fbe39bdbdf | Update changenote | 2024-09-26 13:48:39 -07:00
Angela P Wen | 0c5c594f45 | Refactor: reuse getArtifactUploaderClient in start-proxy-action | 2024-09-26 13:41:28 -07:00
Angela P Wen | ce99e3489b | Add log messages at INFO level about which artifact version used | 2024-09-26 13:33:43 -07:00
Angela P Wen | bca51cd79e | Rename feature flag to CODEQL_ACTION_ARTIFACT_V2_UPGRADE | 2024-09-26 13:21:27 -07:00
Angela P Wen | 8e4d3c2dba | Refactor: remove argument array indirection and pass directly instead | 2024-09-26 13:19:49 -07:00
Angela P Wen | 4a5884aaa5 | Add changenote | 2024-09-25 14:30:35 -07:00
Angela P Wen | 35684bafaa | Add PR check with feature flag enabled to test upgrade | 2024-09-25 14:14:09 -07:00
Angela P Wen | 4de3002533 | Use new artifact dependency if not on GHES & feature flag enabled | 2024-09-25 13:58:25 -07:00
Angela P Wen | 6d887c18f0 | Update original import statements to use artifact-legacy | 2024-09-25 12:08:39 -07:00
Angela P Wen | 76b799de1d | Install actions/artifact@v1 as artifact-legacy and v2 as artifact | 2024-09-25 12:06:16 -07:00
5393 changed files with 2176869 additions and 71870 deletions


@@ -0,0 +1,99 @@
# Checks logs, SARIF, and database bundle debug artifacts exist and are accessible
# with download-artifact@v4 when CODEQL_ACTION_ARTIFACT_V2_UPGRADE is set to true.
name: PR Check - Debug artifact upload using artifact@v2
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  CODEQL_ACTION_ARTIFACT_V2_UPGRADE: true
on:
  push:
    branches:
      - main
      - releases/v*
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  schedule:
    - cron: '0 5 * * *'
  workflow_dispatch: {}
jobs:
  upload-artifacts:
    strategy:
      fail-fast: false
      matrix:
        version:
          - stable-v2.13.5
          - stable-v2.14.6
          - stable-v2.15.5
          - stable-v2.16.6
          - stable-v2.17.6
          - default
          - linked
          - nightly-latest
    name: Upload debug artifacts
    env:
      CODEQL_ACTION_TEST_MODE: true
    timeout-minutes: 45
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository
        uses: actions/checkout@v4
      - name: Prepare test
        id: prepare-test
        uses: ./.github/actions/prepare-test
        with:
          version: ${{ matrix.version }}
      - uses: actions/setup-go@v5
        with:
          go-version: ^1.13.1
      - uses: ./../action/init
        id: init
        with:
          tools: ${{ steps.prepare-test.outputs.tools-url }}
          debug: true
          debug-artifact-name: my-debug-artifacts
          debug-database-name: my-db
          # We manually exclude Swift from the languages list here, as it is not supported on Ubuntu
          languages: cpp,csharp,go,java,javascript,python,ruby
      - name: Build code
        shell: bash
        run: ./build.sh
      - uses: ./../action/analyze
        id: analysis
  download-and-check-artifacts:
    name: Download and check debug artifacts
    needs: upload-artifacts
    timeout-minutes: 45
    runs-on: ubuntu-latest
    steps:
      - name: Download all artifacts
        uses: actions/download-artifact@v4
      - name: Check expected artifacts exist
        shell: bash
        run: |
          VERSIONS="stable-v2.13.5 stable-v2.14.6 stable-v2.15.5 stable-v2.16.6 stable-v2.17.6 default linked nightly-latest"
          LANGUAGES="cpp csharp go java javascript python"
          for version in $VERSIONS; do
            pushd "./my-debug-artifacts-${version//./}"
            echo "Artifacts from version $version:"
            for language in $LANGUAGES; do
              echo "- Checking $language"
              if [[ ! -f "$language.sarif" ]] ; then
                echo "Missing a SARIF file for $language"
                exit 1
              fi
              if [[ ! -f "my-db-$language.zip" ]] ; then
                echo "Missing a database bundle for $language"
                exit 1
              fi
              if [[ ! -d "$language/log" ]] ; then
                echo "Missing logs for $language"
                exit 1
              fi
            done
            popd
          done
    env:
      GO111MODULE: auto


@@ -6,7 +6,9 @@ Note that the only difference between `v2` and `v3` of the CodeQL Action is the
## [UNRELEASED]
No user facing changes.
- Add support for using `actions/download-artifact@v4` to programmatically consume CodeQL Action debug artifacts. Customers who use `actions/download-artifact` should set the `CODEQL_ACTION_ARTIFACT_V2_UPGRADE` environment variable to `true` and bump `actions/download-artifact@v3` to `actions/download-artifact@v4` to avoid breakage. All customers, except those running on GitHub Enterprise Server, will be opted in to this change in early November. [#2482](https://github.com/github/codeql-action/pull/2482)
- This is because `actions/upload-artifact@v3` and `actions/download-artifact@v3` will be deprecated at the end of November. See [GitHub Changelog](https://github.blog/changelog/2024-04-16-deprecation-notice-v3-of-the-artifact-actions/).
- This change is currently unavailable for GitHub Enterprise Server customers, as the compatible npm `@actions/artifact@v2` used to upload the artifacts is not yet supported on GHES.
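For programmatic consumption, a minimal sketch using the v2 client (the artifact name and download path are hypothetical; `getArtifact` and `downloadArtifact` are the v2 APIs described in the vendored `@actions/artifact` README further down this diff):
```js
// Sketch only: downloading a CodeQL debug artifact with @actions/artifact v2
// from within the same workflow run. The artifact name is hypothetical.
const { DefaultArtifactClient } = require('@actions/artifact');

async function fetchDebugArtifact() {
    const client = new DefaultArtifactClient();
    // Look the artifact up by name; v2 downloads address artifacts by ID.
    const { artifact } = await client.getArtifact('my-debug-artifacts');
    const { downloadPath } = await client.downloadArtifact(artifact.id, {
        path: '/tmp/codeql-debug', // hypothetical destination
    });
    console.log(`Debug artifact extracted to ${downloadPath}`);
}

fetchDebugArtifact().catch((err) => {
    console.error(err);
    process.exitCode = 1;
});
```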
## 3.26.9 - 24 Sep 2024


@@ -29,17 +29,29 @@ Object.defineProperty(exports, "__esModule", { value: true });
* other `post:` hooks.
*/
const core = __importStar(require("@actions/core"));
const actions_util_1 = require("./actions-util");
const api_client_1 = require("./api-client");
const config_utils_1 = require("./config-utils");
const debugArtifacts = __importStar(require("./debug-artifacts"));
const environment_1 = require("./environment");
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const util_1 = require("./util");
async function runWrapper() {
    try {
        const logger = (0, logging_1.getActionsLogger)();
        const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
        (0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
        const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
        const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
        // Upload SARIF artifacts if we determine that this is a first-party analysis run.
        // For third-party runs, this artifact will be uploaded in the `upload-sarif-post` step.
        if (process.env[environment_1.EnvVar.INIT_ACTION_HAS_RUN] === "true") {
            await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", () => debugArtifacts.uploadCombinedSarifArtifacts(logger));
            const config = await (0, config_utils_1.getConfig)((0, actions_util_1.getTemporaryDirectory)(), logger);
            if (config !== undefined) {
                await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", () => debugArtifacts.uploadCombinedSarifArtifacts(logger, config.gitHubVersion.type, features));
            }
        }
    }
    catch (error) {

lib/analyze-action-post.js.map (generated source map): diff suppressed; the map was regenerated alongside lib/analyze-action-post.js.

lib/debug-artifacts.js (generated): 45 changed lines

@@ -30,9 +30,11 @@ exports.sanitizeArtifactName = sanitizeArtifactName;
exports.uploadCombinedSarifArtifacts = uploadCombinedSarifArtifacts;
exports.tryUploadAllAvailableDebugArtifacts = tryUploadAllAvailableDebugArtifacts;
exports.uploadDebugArtifacts = uploadDebugArtifacts;
exports.getArtifactUploaderClient = getArtifactUploaderClient;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const artifact = __importStar(require("@actions/artifact"));
const artifactLegacy = __importStar(require("@actions/artifact-legacy"));
const core = __importStar(require("@actions/core"));
const adm_zip_1 = __importDefault(require("adm-zip"));
const del_1 = __importDefault(require("del"));
@@ -40,6 +42,7 @@ const actions_util_1 = require("./actions-util");
const analyze_1 = require("./analyze");
const codeql_1 = require("./codeql");
const environment_1 = require("./environment");
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const util_1 = require("./util");
function sanitizeArtifactName(name) {
@@ -49,7 +52,7 @@ function sanitizeArtifactName(name) {
 * Upload Actions SARIF artifacts for debugging when CODEQL_ACTION_DEBUG_COMBINED_SARIF
 * environment variable is set
 */
async function uploadCombinedSarifArtifacts(logger) {
async function uploadCombinedSarifArtifacts(logger, gitHubVariant, features) {
    const tempDir = (0, actions_util_1.getTemporaryDirectory)();
    // Upload Actions SARIF artifacts for debugging when environment variable is set
    if (process.env["CODEQL_ACTION_DEBUG_COMBINED_SARIF"] === "true") {
@@ -68,7 +71,7 @@ async function uploadCombinedSarifArtifacts(logger) {
            }
        }
        try {
            await uploadDebugArtifacts(toUpload, baseTempDir, "combined-sarif-artifacts");
            await uploadDebugArtifacts(logger, toUpload, baseTempDir, "combined-sarif-artifacts", gitHubVariant, features);
        }
        catch (e) {
            logger.warning(`Failed to upload combined SARIF files as Actions debugging artifact. Reason: ${(0, util_1.getErrorMessage)(e)}`);
@@ -128,7 +131,7 @@ async function tryBundleDatabase(config, language, logger) {
 *
 * Logs and suppresses any errors that occur.
 */
async function tryUploadAllAvailableDebugArtifacts(config, logger) {
async function tryUploadAllAvailableDebugArtifacts(config, logger, features) {
    const filesToUpload = [];
    try {
        for (const language of config.languages) {
@@ -168,13 +171,13 @@ async function tryUploadAllAvailableDebugArtifacts(config, logger) {
        return;
    }
    try {
        await (0, logging_1.withGroup)("Uploading debug artifacts", async () => uploadDebugArtifacts(filesToUpload, config.dbLocation, config.debugArtifactName));
        await (0, logging_1.withGroup)("Uploading debug artifacts", async () => uploadDebugArtifacts(logger, filesToUpload, config.dbLocation, config.debugArtifactName, config.gitHubVersion.type, features));
    }
    catch (e) {
        logger.warning(`Failed to upload debug artifacts. Reason: ${(0, util_1.getErrorMessage)(e)}`);
    }
}
async function uploadDebugArtifacts(toUpload, rootDir, artifactName) {
async function uploadDebugArtifacts(logger, toUpload, rootDir, artifactName, ghVariant, features) {
    if (toUpload.length === 0) {
        return;
    }
@@ -189,11 +192,33 @@ async function uploadDebugArtifacts(toUpload, rootDir, artifactName) {
            core.info("Could not parse user-specified `matrix` input into JSON. The debug artifact will not be named with the user's `matrix` input.");
        }
    }
    await artifact.create().uploadArtifact(sanitizeArtifactName(`${artifactName}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir), {
        continueOnError: true,
        // ensure we don't keep the debug artifacts around for too long since they can be large.
        retentionDays: 7,
    });
    const artifactUploader = await getArtifactUploaderClient(logger, ghVariant, features);
    try {
        await artifactUploader.uploadArtifact(sanitizeArtifactName(`${artifactName}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir), {
            // ensure we don't keep the debug artifacts around for too long since they can be large.
            retentionDays: 7,
        });
    }
    catch (e) {
        // A failure to upload debug artifacts should not fail the entire action.
        core.warning(`Failed to upload debug artifacts: ${e}`);
    }
}
// `@actions/artifact@v2` is not yet supported on GHES so the legacy version of the client will be used on GHES
// until it is supported. We also use the legacy version of the client if the feature flag is disabled.
async function getArtifactUploaderClient(logger, ghVariant, features) {
    if (ghVariant === util_1.GitHubVariant.GHES) {
        logger.info("Uploading debug artifacts using the `@actions/artifact@v1` client because the `v2` version is not yet supported on GHES.");
        return artifactLegacy.create();
    }
    else if (!(await features.getValue(feature_flags_1.Feature.ArtifactV2Upgrade))) {
        logger.info("Uploading debug artifacts using the `@actions/artifact@v1` client because the value of the relevant feature flag is false. To use the `v2` version of the client, set the `CODEQL_ACTION_ARTIFACT_V2_UPGRADE` environment variable to true.");
        return artifactLegacy.create();
    }
    else {
        logger.info("Uploading debug artifacts using the `@actions/artifact@v2` client.");
        return new artifact.DefaultArtifactClient();
    }
}
/**
* If a database has not been finalized, we cannot run the `codeql database bundle`

File diff suppressed because one or more lines are too long


@@ -28,6 +28,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const debugArtifacts = __importStar(require("./debug-artifacts"));
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util_1 = require("./util");
(0, ava_1.default)("sanitizeArtifactName", (t) => {
t.deepEqual(debugArtifacts.sanitizeArtifactName("hello-world_"), "hello-world_");
t.deepEqual(debugArtifacts.sanitizeArtifactName("hello`world`"), "helloworld");
@@ -36,6 +40,8 @@ const debugArtifacts = __importStar(require("./debug-artifacts"));
});
(0, ava_1.default)("uploadDebugArtifacts", async (t) => {
// Test that no error is thrown if artifacts list is empty.
await t.notThrowsAsync(debugArtifacts.uploadDebugArtifacts([], "rootDir", "artifactName"));
const logger = (0, logging_1.getActionsLogger)();
const mockFeature = (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.ArtifactV2Upgrade]);
await t.notThrowsAsync(debugArtifacts.uploadDebugArtifacts(logger, [], "rootDir", "artifactName", util_1.GitHubVariant.DOTCOM, mockFeature));
});
//# sourceMappingURL=debug-artifacts.test.js.map
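The body of `sanitizeArtifactName` is elided by the hunks above, but the assertions in this test pin down its behavior. A minimal sketch consistent with those assertions (not necessarily the exact implementation):
```js
// Sketch: strip every character that is not alphanumeric, underscore, or dash,
// so "hello`world`" becomes "helloworld" while "hello-world_" is unchanged.
function sanitizeArtifactName(name) {
    return name.replace(/[^a-zA-Z0-9_\-]+/g, "");
}

console.log(sanitizeArtifactName("hello-world_")); // "hello-world_"
console.log(sanitizeArtifactName("hello`world`")); // "helloworld"
```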

lib/debug-artifacts.test.js.map (generated source map): diff suppressed; the map was regenerated alongside lib/debug-artifacts.test.js.

lib/feature-flags.js (generated): 6 changed lines

@@ -44,6 +44,7 @@ exports.CODEQL_VERSION_FINE_GRAINED_PARALLELISM = "2.15.1";
*/
var Feature;
(function (Feature) {
    Feature["ArtifactV2Upgrade"] = "artifact_v2_upgrade";
    Feature["CleanupTrapCaches"] = "cleanup_trap_caches";
    Feature["CppDependencyInstallation"] = "cpp_dependency_installation_enabled";
    Feature["DisableCsharpBuildless"] = "disable_csharp_buildless";
@@ -53,6 +54,11 @@ var Feature;
    Feature["QaTelemetryEnabled"] = "qa_telemetry_enabled";
})(Feature || (exports.Feature = Feature = {}));
exports.featureConfig = {
    [Feature.ArtifactV2Upgrade]: {
        defaultValue: false,
        envVar: "CODEQL_ACTION_ARTIFACT_V2_UPGRADE",
        minimumVersion: undefined,
    },
    [Feature.CleanupTrapCaches]: {
        defaultValue: false,
        envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",

File diff suppressed because one or more lines are too long
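To illustrate how an entry like `[Feature.ArtifactV2Upgrade]` in `lib/feature-flags.js` above is consumed, here is a hedged sketch of the usual evaluation order: an env-var override is checked before the remote feature-flag API. The helper name and `apiValue` parameter are illustrative, not the action's actual code.
```js
// Sketch: the environment variable named in the config wins when set to
// "true" or "false"; otherwise the remote flag value applies, then the default.
function evaluateFeature(config, apiValue) {
    const envValue = process.env[config.envVar]; // e.g. CODEQL_ACTION_ARTIFACT_V2_UPGRADE
    if (envValue === "true") {
        return true;
    }
    if (envValue === "false") {
        return false;
    }
    return apiValue !== undefined ? apiValue : config.defaultValue;
}

const artifactV2Upgrade = {
    defaultValue: false,
    envVar: "CODEQL_ACTION_ARTIFACT_V2_UPGRADE",
    minimumVersion: undefined,
};
console.log(evaluateFeature(artifactV2Upgrade, undefined)); // false unless the env var is set
```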


@@ -132,7 +132,7 @@ async function run(uploadAllAvailableDebugArtifacts, printDebugLogs, config, rep
    // Upload appropriate Actions artifacts for debugging
    if (config.debugMode) {
        logger.info("Debug mode is on. Uploading available database bundles and logs as Actions debugging artifacts...");
        await uploadAllAvailableDebugArtifacts(config, logger);
        await uploadAllAvailableDebugArtifacts(config, logger, features);
        await printDebugLogs(config);
    }
    if (actionsUtil.isSelfHostedRunner()) {

File diff suppressed because one or more lines are too long


@@ -28,10 +28,14 @@ Object.defineProperty(exports, "__esModule", { value: true });
* It will run after the all steps in this job, in reverse order in relation to
* other `post:` hooks.
*/
const artifact = __importStar(require("@actions/artifact"));
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const api_client_1 = require("./api-client");
const configUtils = __importStar(require("./config-utils"));
const debug_artifacts_1 = require("./debug-artifacts");
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const util_1 = require("./util");
async function runWrapper() {
    try {
@@ -47,11 +51,19 @@ async function runWrapper() {
    if ((config && config.debugMode) || core.isDebug()) {
        const logFilePath = core.getState("proxy-log-file");
        core.info("Debug mode is on. Uploading proxy log as Actions debugging artifact...");
        if (config?.gitHubVersion.type === undefined) {
            core.warning(`Did not upload debug artifacts because cannot determine the GitHub variant running.`);
            return;
        }
        const logger = (0, logging_1.getActionsLogger)();
        const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
        (0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
        const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
        const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, actionsUtil.getTemporaryDirectory(), logger);
        try {
            await artifact
                .create()
                .uploadArtifact("proxy-log-file", [logFilePath], actionsUtil.getTemporaryDirectory(), {
                continueOnError: true,
            const artifactUploader = await (0, debug_artifacts_1.getArtifactUploaderClient)(logger, gitHubVersion.type, features);
            await artifactUploader.uploadArtifact("proxy-log-file", [logFilePath], actionsUtil.getTemporaryDirectory(), {
                // ensure we don't keep the debug artifacts around for too long since they can be large.
                retentionDays: 7,
            });
        }

lib/start-proxy-action-post.js.map (generated source map): diff suppressed; the map was regenerated alongside lib/start-proxy-action-post.js.


@@ -29,17 +29,29 @@ Object.defineProperty(exports, "__esModule", { value: true });
* other `post:` hooks.
*/
const core = __importStar(require("@actions/core"));
const actions_util_1 = require("./actions-util");
const api_client_1 = require("./api-client");
const debugArtifacts = __importStar(require("./debug-artifacts"));
const environment_1 = require("./environment");
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const util_1 = require("./util");
async function runWrapper() {
    try {
        const logger = (0, logging_1.getActionsLogger)();
        const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
        (0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
        const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
        const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
        // Upload SARIF artifacts if we determine that this is a third-party analysis run.
        // For first-party runs, this artifact will be uploaded in the `analyze-post` step.
        if (process.env[environment_1.EnvVar.INIT_ACTION_HAS_RUN] !== "true") {
            await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", () => debugArtifacts.uploadCombinedSarifArtifacts(logger));
            if (gitHubVersion.type === undefined) {
                core.warning(`Did not upload debug artifacts because cannot determine the GitHub variant running.`);
                return;
            }
            await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", () => debugArtifacts.uploadCombinedSarifArtifacts(logger, gitHubVersion.type, features));
        }
    }
    catch (error) {

lib/upload-sarif-action-post.js.map (generated source map): diff suppressed; the map was regenerated alongside lib/upload-sarif-action-post.js.

node_modules/.bin/crc32 (generated, vendored, symbolic link): 1 line

@@ -0,0 +1 @@
../crc-32/bin/crc32.njs

node_modules/.bin/dot-object (generated, vendored, symbolic link): 1 line

@@ -0,0 +1 @@
../dot-object/bin/dot-object

node_modules/.bin/fxparser (generated, vendored, symbolic link): 1 line

@@ -0,0 +1 @@
../fast-xml-parser/src/cli/cli.js

node_modules/.bin/glob (generated, vendored, symbolic link): 1 line

@@ -0,0 +1 @@
../glob/dist/esm/bin.mjs

node_modules/.bin/mkdirp (generated, vendored, symbolic link): 1 line

@@ -0,0 +1 @@
../mkdirp/bin/cmd.js

node_modules/.bin/protoc (generated, vendored, symbolic link): 1 line

@@ -0,0 +1 @@
../@protobuf-ts/protoc/protoc.js

node_modules/.bin/protoc-gen-dump (generated, vendored, symbolic link): 1 line

@@ -0,0 +1 @@
../@protobuf-ts/plugin/bin/protoc-gen-dump

node_modules/.bin/protoc-gen-ts (generated, vendored, symbolic link): 1 line

@@ -0,0 +1 @@
../@protobuf-ts/plugin/bin/protoc-gen-ts

node_modules/.bin/protoc-gen-twirp_ts (generated, vendored, symbolic link): 1 line

@@ -0,0 +1 @@
../twirp-ts/protoc-gen-twirp_ts

node_modules/.package-lock.json (generated, vendored): 1435 changed lines. File diff suppressed because it is too large.

node_modules/@actions/artifact-legacy/LICENSE.md (generated, vendored, new file): 9 lines

@@ -0,0 +1,9 @@
The MIT License (MIT)
Copyright 2019 GitHub
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

node_modules/@actions/artifact-legacy/README.md (generated, vendored, new file): 213 lines

@@ -0,0 +1,213 @@
# `@actions/artifact`
## Usage
You can use this package to interact with the actions artifacts.
- [Upload an Artifact](#Upload-an-Artifact)
- [Download a Single Artifact](#Download-a-Single-Artifact)
- [Download All Artifacts](#Download-all-Artifacts)
- [Additional Documentation](#Additional-Documentation)
- [Contributions](#Contributions)
Relative paths and absolute paths are both allowed. Relative paths are rooted against the current working directory.
## Upload an Artifact
Method Name: `uploadArtifact`
#### Inputs
- `name`
  - The name of the artifact that is being uploaded
  - Required
- `files`
  - A list of file paths that describe what should be uploaded as part of the artifact
  - If a path is provided that does not exist, an error will be thrown
  - Can be absolute or relative. Internally everything is normalized and resolved
  - Required
- `rootDirectory`
  - A file path that denotes the root directory of the files being uploaded. This path is used to strip the paths provided in `files` to control how they are uploaded and structured
  - If a file specified in `files` is not in the `rootDirectory`, an error will be thrown
  - Required
- `options`
  - Extra options that allow for the customization of the upload behavior
  - Optional
#### Available Options
- `continueOnError`
  - Indicates if the artifact upload should continue in the event a file fails to upload. If there is an error during upload, a partial artifact will always be created and available for download at the end. The `size` reported will be the amount of storage that the user or org will be charged for the partial artifact.
  - If set to `false`, and an error is encountered, all other uploads will stop and any files that were queued will not be attempted to be uploaded. The partial artifact available will only include files up until the failure.
  - If set to `true` and an error is encountered, the failed file will be skipped and ignored and all other queued files will be attempted to be uploaded. There will be an artifact available for download at the end with everything excluding the file that failed to upload
  - Optional, defaults to `true` if not specified
- `retentionDays`
  - Duration after which artifact will expire in days
  - Minimum value: 1
  - Maximum value: 90 unless changed by repository setting
  - If this is set to a greater value than the retention settings allowed, the retention on artifacts will be reduced to match the max value allowed on the server, and the upload process will continue. An input of 0 assumes default retention value.
#### Example using Absolute File Paths
```js
const artifact = require('@actions/artifact');
const artifactClient = artifact.create()
const artifactName = 'my-artifact';
const files = [
    '/home/user/files/plz-upload/file1.txt',
    '/home/user/files/plz-upload/file2.txt',
    '/home/user/files/plz-upload/dir/file3.txt'
]
const rootDirectory = '/home/user/files/plz-upload'
const options = {
    continueOnError: true
}

const uploadResult = await artifactClient.uploadArtifact(artifactName, files, rootDirectory, options)
```
#### Example using Relative File Paths
```js
// Assuming the current working directory is /home/user/files/plz-upload
const artifact = require('@actions/artifact');
const artifactClient = artifact.create()
const artifactName = 'my-artifact';
const files = [
    'file1.txt',
    'file2.txt',
    'dir/file3.txt'
]
const rootDirectory = '.' // Also possible to use __dirname
const options = {
    continueOnError: false
}

const uploadResponse = await artifactClient.uploadArtifact(artifactName, files, rootDirectory, options)
```
#### Upload Result
The returned `UploadResponse` will contain the following information
- `artifactName`
  - The name of the artifact that was uploaded
- `artifactItems`
  - A list of all files that describe what is uploaded if there are no errors encountered. Usually this will be equal to the inputted `files` with the exception of empty directories (will not be uploaded)
- `size`
  - Total size of the artifact that was uploaded in bytes
- `failedItems`
  - A list of items that were not uploaded successfully (this will include queued items that were not uploaded if `continueOnError` is set to false). This is a subset of `artifactItems`
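For illustration, a short sketch that reads these fields from the response returned by the upload examples above:
```js
// Sketch: inspecting the UploadResponse fields documented above.
const uploadResponse = await artifactClient.uploadArtifact(artifactName, files, rootDirectory, options)

console.log(`Uploaded '${uploadResponse.artifactName}' (${uploadResponse.size} bytes)`)
console.log(`Items uploaded: ${uploadResponse.artifactItems.length}`)
if (uploadResponse.failedItems.length > 0) {
    console.log(`Failed items: ${uploadResponse.failedItems.join(', ')}`)
}
```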
## Download a Single Artifact
Method Name: `downloadArtifact`
#### Inputs
- `name`
  - The name of the artifact to download
  - Required
- `path`
  - Path that denotes where the artifact will be downloaded to
  - Optional. Defaults to the GitHub workspace directory (`$GITHUB_WORKSPACE`) if not specified
- `options`
  - Extra options that allow for the customization of the download behavior
  - Optional
#### Available Options
- `createArtifactFolder`
  - Specifies if a folder (the artifact name) is created for the artifact that is downloaded (contents downloaded into this folder)
  - Optional. Defaults to false if not specified
#### Example
```js
const artifact = require('@actions/artifact');
const artifactClient = artifact.create()
const artifactName = 'my-artifact';
const path = 'some/directory'
const options = {
    createArtifactFolder: false
}

const downloadResponse = await artifactClient.downloadArtifact(artifactName, path, options)

// Post download, the directory structure will look like this
/some
    /directory
        /file1.txt
        /file2.txt
        /dir
            /file3.txt

// If createArtifactFolder is set to true, the directory structure will look like this
/some
    /directory
        /my-artifact
            /file1.txt
            /file2.txt
            /dir
                /file3.txt
```
#### Download Response
The returned `DownloadResponse` will contain the following information
- `artifactName`
  - The name of the artifact that was downloaded
- `downloadPath`
  - The full path to where the artifact was downloaded
## Download All Artifacts
Method Name: `downloadAllArtifacts`
#### Inputs
- `path`
  - Path that denotes where the artifact will be downloaded to
  - Optional. Defaults to the GitHub workspace directory (`$GITHUB_WORKSPACE`) if not specified
```js
const artifact = require('@actions/artifact');
const artifactClient = artifact.create();
const downloadResponse = await artifactClient.downloadAllArtifacts();

// output result
for (const response of downloadResponse) {
    console.log(response.artifactName);
    console.log(response.downloadPath);
}
```
Because there are multiple artifacts, an extra directory (denoted by the name of the artifact) will be created for each artifact in the path. With 2 artifacts(`my-artifact-1` and `my-artifact-2` for example) and the default path, the directory structure will be as follows:
```js
/GITHUB_WORKSPACE
    /my-artifact-1
        / .. contents of `my-artifact-1`
    /my-artifact-2
        / .. contents of `my-artifact-2`
```
#### Download Result
An array will be returned that describes the results for downloading all artifacts. The number of items in the array indicates the number of artifacts that were downloaded.
Each artifact will have the same `DownloadResponse` as if it was individually downloaded
- `artifactName`
  - The name of the artifact that was downloaded
- `downloadPath`
  - The full path to where the artifact was downloaded
## Additional Documentation
Check out [additional-information](docs/additional-information.md) for extra documentation around usage, restrictions and behavior.
Check out [implementation-details](docs/implementation-details.md) for extra information about the implementation of this package.
## Contributions
See [contributor guidelines](https://github.com/actions/toolkit/blob/main/.github/CONTRIBUTING.md) for general guidelines and information about toolkit contributions.
For contributions related to this package, see [artifact contributions](CONTRIBUTIONS.md) for more information.

node_modules/@actions/artifact-legacy/package.json (generated, vendored, new file): 51 lines

@@ -0,0 +1,51 @@
{
  "name": "@actions/artifact",
  "version": "1.1.2",
  "preview": true,
  "description": "Actions artifact lib",
  "keywords": [
    "github",
    "actions",
    "artifact"
  ],
  "homepage": "https://github.com/actions/toolkit/tree/main/packages/artifact",
  "license": "MIT",
  "main": "lib/artifact-client.js",
  "types": "lib/artifact-client.d.ts",
  "directories": {
    "lib": "lib",
    "test": "__tests__"
  },
  "files": [
    "lib",
    "!.DS_Store"
  ],
  "publishConfig": {
    "access": "public"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/actions/toolkit.git",
    "directory": "packages/artifact"
  },
  "scripts": {
    "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json",
    "test": "echo \"Error: run tests from root\" && exit 1",
    "bootstrap": "cd ../../ && npm run bootstrap",
    "tsc-run": "tsc",
    "tsc": "npm run bootstrap && npm run tsc-run"
  },
  "bugs": {
    "url": "https://github.com/actions/toolkit/issues"
  },
  "dependencies": {
    "@actions/core": "^1.9.1",
    "@actions/http-client": "^2.0.1",
    "tmp": "^0.2.1",
    "tmp-promise": "^3.0.2"
  },
  "devDependencies": {
    "@types/tmp": "^0.2.1",
    "typescript": "^4.3.0"
  }
}


@@ -1,213 +1,192 @@
node_modules/@actions/artifact/README.md (generated, vendored): rewritten for v2. The removed lines are identical to the `@actions/artifact-legacy` README above; the new content follows.
# `@actions/artifact`
## Usage
You can use this package to interact with the actions artifacts.

This is the core library that powers the [`@actions/upload-artifact`](https://github.com/actions/upload-artifact) and [`@actions/download-artifact`](https://github.com/actions/download-artifact) actions.

- [`@actions/artifact`](#actionsartifact)
  - [v2 - What's New](#v2---whats-new)
    - [Improvements](#improvements)
    - [Breaking changes](#breaking-changes)
  - [Quick Start](#quick-start)
  - [Examples](#examples)
    - [Upload and Download](#upload-and-download)
    - [Delete an Artifact](#delete-an-artifact)
    - [Downloading from other workflow runs or repos](#downloading-from-other-workflow-runs-or-repos)
    - [Speeding up large uploads](#speeding-up-large-uploads)
  - [Additional Resources](#additional-resources)
## v2 - What's New
> [!IMPORTANT]
> @actions/artifact v2+, upload-artifact@v4+, and download-artifact@v4+ are not currently supported on GHES yet. The previous version of this package can be found at [this tag](https://github.com/actions/toolkit/tree/@actions/artifact@1.1.2/packages/artifact) and [on npm](https://www.npmjs.com/package/@actions/artifact/v/1.1.2).

The release of `@actions/artifact@v2` (including `upload-artifact@v4` and `download-artifact@v4`) is a major change to the backend architecture of Artifacts, with numerous performance and behavioral improvements.
### Improvements
1. All upload and download operations are much quicker, up to 80% faster download times and 96% faster upload times in worst case scenarios.
2. Once uploaded, an Artifact ID is returned and Artifacts are immediately available in the UI and [REST API](https://docs.github.com/en/rest/actions/artifacts). Previously, you would have to wait for the run to be completed before an ID was available or any APIs could be utilized.
3. Artifacts can now be downloaded and deleted from the UI _before_ the entire workflow run finishes.
4. The contents of an Artifact are uploaded together into an _immutable_ archive. They cannot be altered by subsequent jobs. Both of these factors help reduce the possibility of accidentally corrupting Artifact files. (Digest/integrity hash coming soon in the API!)
5. This library (and `actions/download-artifact`) now supports downloading Artifacts from _other_ repositories and runs if a `GITHUB_TOKEN` with sufficient `actions:read` permissions is provided.
### Breaking changes
1. Firewall rules required for self-hosted runners.

   If you are using self-hosted runners behind a firewall, you must have flows open to [Actions endpoints](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github). If you cannot use wildcard rules for your firewall, see the GitHub [meta endpoint](https://api.github.com/meta) for specific endpoints.

   e.g.
   ```bash
   curl https://api.github.com/meta | jq .domains.actions
   ```

2. Uploading to the same named Artifact multiple times.

   Due to how Artifacts are created in this new version, it is no longer possible to upload to the same named Artifact multiple times. You must either split the uploads into multiple Artifacts with different names, or only upload once (see the sketch after this list).

3. Limit of Artifacts for an individual job.

   Each job in a workflow run now has a limit of 10 artifacts.
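For breaking change 2, a minimal sketch of splitting uploads into uniquely named Artifacts (the shard names and file lists are hypothetical; assumes an async context like the examples below):
```js
// Sketch: v2 Artifacts are immutable, so upload each batch under its own name
// instead of appending to one shared artifact.
const { DefaultArtifactClient } = require('@actions/artifact')

const artifact = new DefaultArtifactClient()
const shards = {
    'test-results-unit': ['unit/results.xml'],           // hypothetical files
    'test-results-integration': ['integration/results.xml'],
}

for (const [name, files] of Object.entries(shards)) {
    // Each uploadArtifact call creates a separate, immutable Artifact.
    const { id } = await artifact.uploadArtifact(name, files, '.')
    console.log(`Uploaded ${name} as artifact ${id}`)
}
```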
## Quick Start
Install the package:
```bash
npm i @actions/artifact
```
Import the module:
```js
// ES6 module
import {DefaultArtifactClient} from '@actions/artifact'

// CommonJS
const {DefaultArtifactClient} = require('@actions/artifact')
```
Then instantiate:
```js
const artifact = new DefaultArtifactClient()
```
For a comprehensive list of classes, interfaces, functions and more, see the [generated documentation](./docs/generated/README.md).
## Examples
### Upload and Download
The most basic scenario is uploading one or more files to an Artifact, then downloading that Artifact. Downloads are based on the Artifact ID, which can be obtained in the response of `uploadArtifact`, `getArtifact`, `listArtifacts` or via the [REST API](https://docs.github.com/en/rest/actions/artifacts).
```js
const {id, size} = await artifact.uploadArtifact(
    // name of the artifact
    'my-artifact',
    // files to include (supports absolute and relative paths)
    ['/absolute/path/file1.txt', './relative/file2.txt'],
    {
        // optional: how long to retain the artifact
        // if unspecified, defaults to repository/org retention settings (the limit of this value)
        retentionDays: 10
    }
)

console.log(`Created artifact with id: ${id} (bytes: ${size})`)

const {downloadPath} = await artifact.downloadArtifact(id, {
    // optional: download destination path. otherwise defaults to $GITHUB_WORKSPACE
    path: '/tmp/dst/path',
})

console.log(`Downloaded artifact ${id} to: ${downloadPath}`)
```
### Delete an Artifact
To delete an artifact, all you need is the name.
```js
const {id} = await artifact.deleteArtifact(
    // name of the artifact
    'my-artifact'
)

console.log(`Deleted Artifact ID '${id}'`)
```
It also supports options to delete from other repos/runs, given a GitHub token with `actions:write` permissions on the target repository is supplied.
```js
const findBy = {
    // must have actions:write permission on target repository
    token: process.env['GITHUB_TOKEN'],
    workflowRunId: 123,
    repositoryOwner: 'actions',
    repositoryName: 'toolkit'
}

const {id} = await artifact.deleteArtifact(
    // name of the artifact
    'my-artifact',
    // options to find by other repo/owner
    { findBy }
)

console.log(`Deleted Artifact ID '${id}' from ${findBy.repositoryOwner}/${findBy.repositoryName}`)
```
### Downloading from other workflow runs or repos
It may be useful to download Artifacts from other workflow runs, or even other repositories. By default, the permissions are scoped so they can only download Artifacts within the current workflow run. To elevate permissions for this scenario, you must specify `options.findBy` to `downloadArtifact`.
```ts
const findBy = {
    // must have actions:read permission on target repository
    token: process.env['GITHUB_TOKEN'],
    workflowRunId: 123,
    repositoryOwner: 'actions',
    repositoryName: 'toolkit'
}

await artifact.downloadArtifact(1337, {
    findBy
})

// can also be used in other methods

await artifact.getArtifact('my-artifact', {
    findBy
})

await artifact.listArtifacts({
    findBy
})
```
### Speeding up large uploads
If you have large files that need to be uploaded (or file types that don't compress well), you may benefit from changing the compression level of the Artifact archive. NOTE: This is a tradeoff between artifact upload time and stored data size.
```ts
await artifact.uploadArtifact('my-massive-artifact', ['big_file.bin'], {
    // The level of compression for Zlib to be applied to the artifact archive.
    // - 0: No compression
    // - 1: Best speed
    // - 6: Default compression (same as GNU Gzip)
    // - 9: Best compression
    compressionLevel: 0
})
```
## Additional Resources
- [Releases](./RELEASES.md)
- [Contribution Guide](./CONTRIBUTIONS.md)
- [Frequently Asked Questions](./docs/faq.md)

node_modules/@actions/artifact/lib/artifact.d.ts (generated, vendored, new file): 6 lines

@@ -0,0 +1,6 @@
import { ArtifactClient } from './internal/client';
export * from './internal/shared/interfaces';
export * from './internal/shared/errors';
export * from './internal/client';
declare const client: ArtifactClient;
export default client;

node_modules/@actions/artifact/lib/artifact.js (generated, vendored, new file): 23 lines

@@ -0,0 +1,23 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
const client_1 = require("./internal/client");
__exportStar(require("./internal/shared/interfaces"), exports);
__exportStar(require("./internal/shared/errors"), exports);
__exportStar(require("./internal/client"), exports);
const client = new client_1.DefaultArtifactClient();
exports.default = client;
//# sourceMappingURL=artifact.js.map

node_modules/@actions/artifact/lib/artifact.js.map (generated, vendored, new file): 1 line

@@ -0,0 +1 @@
{"version":3,"file":"artifact.js","sourceRoot":"","sources":["../src/artifact.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,8CAAuE;AAEvE,+DAA4C;AAC5C,2DAAwC;AACxC,oDAAiC;AAEjC,MAAM,MAAM,GAAmB,IAAI,8BAAqB,EAAE,CAAA;AAC1D,kBAAe,MAAM,CAAA"}


@@ -0,0 +1,145 @@
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import type { JsonValue } from "@protobuf-ts/runtime";
import type { JsonReadOptions } from "@protobuf-ts/runtime";
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* A Timestamp represents a point in time independent of any time zone
* or calendar, represented as seconds and fractions of seconds at
* nanosecond resolution in UTC Epoch time. It is encoded using the
* Proleptic Gregorian Calendar which extends the Gregorian calendar
* backwards to year one. It is encoded assuming all minutes are 60
* seconds long, i.e. leap seconds are "smeared" so that no leap second
* table is needed for interpretation. Range is from
* 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z.
* By restricting to that range, we ensure that we can convert to
* and from RFC 3339 date strings.
* See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt).
*
* # Examples
*
* Example 1: Compute Timestamp from POSIX `time()`.
*
* Timestamp timestamp;
* timestamp.set_seconds(time(NULL));
* timestamp.set_nanos(0);
*
* Example 2: Compute Timestamp from POSIX `gettimeofday()`.
*
* struct timeval tv;
* gettimeofday(&tv, NULL);
*
* Timestamp timestamp;
* timestamp.set_seconds(tv.tv_sec);
* timestamp.set_nanos(tv.tv_usec * 1000);
*
* Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
*
* FILETIME ft;
* GetSystemTimeAsFileTime(&ft);
* UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
*
* // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
* // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
* Timestamp timestamp;
* timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
* timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
*
* Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
*
* long millis = System.currentTimeMillis();
*
* Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
* .setNanos((int) ((millis % 1000) * 1000000)).build();
*
*
* Example 5: Compute Timestamp from current time in Python.
*
* timestamp = Timestamp()
* timestamp.GetCurrentTime()
*
* # JSON Mapping
*
* In JSON format, the Timestamp type is encoded as a string in the
* [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
* format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
* where {year} is always expressed using four digits while {month}, {day},
* {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
* seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
* are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
* is required. A proto3 JSON serializer should always use UTC (as indicated by
* "Z") when printing the Timestamp type and a proto3 JSON parser should be
* able to accept both UTC and other timezones (as indicated by an offset).
*
* For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
* 01:30 UTC on January 15, 2017.
*
* In JavaScript, one can convert a Date object to this format using the
 * standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
* method. In Python, a standard `datetime.datetime` object can be converted
* to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime)
* with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one
* can use the Joda Time's [`ISODateTimeFormat.dateTime()`](
* http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime--
* ) to obtain a formatter capable of generating timestamps in this format.
*
*
*
* @generated from protobuf message google.protobuf.Timestamp
*/
export interface Timestamp {
/**
* Represents seconds of UTC time since Unix epoch
* 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
* 9999-12-31T23:59:59Z inclusive.
*
* @generated from protobuf field: int64 seconds = 1;
*/
seconds: string;
/**
* Non-negative fractions of a second at nanosecond resolution. Negative
* second values with fractions must still have non-negative nanos values
* that count forward in time. Must be from 0 to 999,999,999
* inclusive.
*
* @generated from protobuf field: int32 nanos = 2;
*/
nanos: number;
}
declare class Timestamp$Type extends MessageType<Timestamp> {
constructor();
/**
* Creates a new `Timestamp` for the current time.
*/
now(): Timestamp;
/**
* Converts a `Timestamp` to a JavaScript Date.
*/
toDate(message: Timestamp): Date;
/**
* Converts a JavaScript Date to a `Timestamp`.
*/
fromDate(date: Date): Timestamp;
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonWrite(message: Timestamp, options: JsonWriteOptions): JsonValue;
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Timestamp): Timestamp;
create(value?: PartialMessage<Timestamp>): Timestamp;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Timestamp): Timestamp;
internalBinaryWrite(message: Timestamp, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.Timestamp
*/
export declare const Timestamp: Timestamp$Type;
export {};
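The declaration above extends protobuf-ts's MessageType with Date conversion helpers. A small round-trip sketch (the deep import path is an assumption based on the generated barrel module shown later in this diff):

import { Timestamp } from "@actions/artifact/lib/generated";

// With long_type_string, int64 seconds is carried as a string; nanos is a plain number.
const now = Timestamp.now();            // e.g. { seconds: "1727389423", nanos: 500000000 }
const asDate = Timestamp.toDate(now);   // plain JavaScript Date (millisecond precision)
const roundTripped = Timestamp.fromDate(asDate);
console.log(roundTripped.seconds, roundTripped.nanos);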

node_modules/@actions/artifact/lib/generated/google/protobuf/timestamp.js generated vendored Normal file

@@ -0,0 +1,136 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Timestamp = void 0;
const runtime_1 = require("@protobuf-ts/runtime");
const runtime_2 = require("@protobuf-ts/runtime");
const runtime_3 = require("@protobuf-ts/runtime");
const runtime_4 = require("@protobuf-ts/runtime");
const runtime_5 = require("@protobuf-ts/runtime");
const runtime_6 = require("@protobuf-ts/runtime");
const runtime_7 = require("@protobuf-ts/runtime");
// @generated message type with reflection information, may provide speed optimized methods
class Timestamp$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.Timestamp", [
{ no: 1, name: "seconds", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 2, name: "nanos", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
]);
}
/**
* Creates a new `Timestamp` for the current time.
*/
now() {
const msg = this.create();
const ms = Date.now();
msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1000)).toString();
msg.nanos = (ms % 1000) * 1000000;
return msg;
}
/**
* Converts a `Timestamp` to a JavaScript Date.
*/
toDate(message) {
return new Date(runtime_6.PbLong.from(message.seconds).toNumber() * 1000 + Math.ceil(message.nanos / 1000000));
}
/**
* Converts a JavaScript Date to a `Timestamp`.
*/
fromDate(date) {
const msg = this.create();
const ms = date.getTime();
msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1000)).toString();
msg.nanos = (ms % 1000) * 1000000;
return msg;
}
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonWrite(message, options) {
let ms = runtime_6.PbLong.from(message.seconds).toNumber() * 1000;
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
if (message.nanos < 0)
throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative.");
let z = "Z";
if (message.nanos > 0) {
let nanosStr = (message.nanos + 1000000000).toString().substring(1);
if (nanosStr.substring(3) === "000000")
z = "." + nanosStr.substring(0, 3) + "Z";
else if (nanosStr.substring(6) === "000")
z = "." + nanosStr.substring(0, 6) + "Z";
else
z = "." + nanosStr + "Z";
}
return new Date(ms).toISOString().replace(".000Z", z);
}
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonRead(json, options, target) {
if (typeof json !== "string")
throw new Error("Unable to parse Timestamp from JSON " + (0, runtime_5.typeofJsonValue)(json) + ".");
let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/);
if (!matches)
throw new Error("Unable to parse Timestamp from JSON. Invalid format.");
let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z"));
if (Number.isNaN(ms))
throw new Error("Unable to parse Timestamp from JSON. Invalid value.");
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
throw new globalThis.Error("Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
if (!target)
target = this.create();
target.seconds = runtime_6.PbLong.from(ms / 1000).toString();
target.nanos = 0;
if (matches[7])
target.nanos = (parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1000000000);
return target;
}
create(value) {
const message = { seconds: "0", nanos: 0 };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 seconds */ 1:
message.seconds = reader.int64().toString();
break;
case /* int32 nanos */ 2:
message.nanos = reader.int32();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* int64 seconds = 1; */
if (message.seconds !== "0")
writer.tag(1, runtime_1.WireType.Varint).int64(message.seconds);
/* int32 nanos = 2; */
if (message.nanos !== 0)
writer.tag(2, runtime_1.WireType.Varint).int32(message.nanos);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Timestamp
*/
exports.Timestamp = new Timestamp$Type();
//# sourceMappingURL=timestamp.js.map
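Note how internalJsonWrite above trims the nanos field to exactly 3, 6, or 9 fractional digits, as the proto3 JSON mapping requires. A sketch through the standard protobuf-ts toJson/fromJson entry points, which delegate to these internal hooks (same assumed import path as above):

import { Timestamp } from "@actions/artifact/lib/generated";

const ts = Timestamp.fromDate(new Date("2017-01-15T01:30:15.010Z"));
console.log(Timestamp.toJson(ts));  // "2017-01-15T01:30:15.010Z" (10 ms printed as three digits)

// Parsing accepts an offset but normalizes to UTC seconds.
const utc = Timestamp.fromJson("2017-01-15T01:30:15Z");
const offset = Timestamp.fromJson("2017-01-15T02:30:15+01:00");
console.log(utc.seconds === offset.seconds); // true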

node_modules/@actions/artifact/lib/generated/google/protobuf/timestamp.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long

node_modules/@actions/artifact/lib/generated/google/protobuf/wrappers.d.ts generated vendored Normal file

@@ -0,0 +1,307 @@
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import type { JsonValue } from "@protobuf-ts/runtime";
import type { JsonReadOptions } from "@protobuf-ts/runtime";
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* Wrapper message for `double`.
*
* The JSON representation for `DoubleValue` is JSON number.
*
* @generated from protobuf message google.protobuf.DoubleValue
*/
export interface DoubleValue {
/**
* The double value.
*
* @generated from protobuf field: double value = 1;
*/
value: number;
}
/**
* Wrapper message for `float`.
*
* The JSON representation for `FloatValue` is JSON number.
*
* @generated from protobuf message google.protobuf.FloatValue
*/
export interface FloatValue {
/**
* The float value.
*
* @generated from protobuf field: float value = 1;
*/
value: number;
}
/**
* Wrapper message for `int64`.
*
* The JSON representation for `Int64Value` is JSON string.
*
* @generated from protobuf message google.protobuf.Int64Value
*/
export interface Int64Value {
/**
* The int64 value.
*
* @generated from protobuf field: int64 value = 1;
*/
value: string;
}
/**
* Wrapper message for `uint64`.
*
* The JSON representation for `UInt64Value` is JSON string.
*
* @generated from protobuf message google.protobuf.UInt64Value
*/
export interface UInt64Value {
/**
* The uint64 value.
*
* @generated from protobuf field: uint64 value = 1;
*/
value: string;
}
/**
* Wrapper message for `int32`.
*
* The JSON representation for `Int32Value` is JSON number.
*
* @generated from protobuf message google.protobuf.Int32Value
*/
export interface Int32Value {
/**
* The int32 value.
*
* @generated from protobuf field: int32 value = 1;
*/
value: number;
}
/**
* Wrapper message for `uint32`.
*
* The JSON representation for `UInt32Value` is JSON number.
*
* @generated from protobuf message google.protobuf.UInt32Value
*/
export interface UInt32Value {
/**
* The uint32 value.
*
* @generated from protobuf field: uint32 value = 1;
*/
value: number;
}
/**
* Wrapper message for `bool`.
*
* The JSON representation for `BoolValue` is JSON `true` and `false`.
*
* @generated from protobuf message google.protobuf.BoolValue
*/
export interface BoolValue {
/**
* The bool value.
*
* @generated from protobuf field: bool value = 1;
*/
value: boolean;
}
/**
* Wrapper message for `string`.
*
* The JSON representation for `StringValue` is JSON string.
*
* @generated from protobuf message google.protobuf.StringValue
*/
export interface StringValue {
/**
* The string value.
*
* @generated from protobuf field: string value = 1;
*/
value: string;
}
/**
* Wrapper message for `bytes`.
*
* The JSON representation for `BytesValue` is JSON string.
*
* @generated from protobuf message google.protobuf.BytesValue
*/
export interface BytesValue {
/**
* The bytes value.
*
* @generated from protobuf field: bytes value = 1;
*/
value: Uint8Array;
}
declare class DoubleValue$Type extends MessageType<DoubleValue> {
constructor();
/**
* Encode `DoubleValue` to JSON number.
*/
internalJsonWrite(message: DoubleValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `DoubleValue` from JSON number.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: DoubleValue): DoubleValue;
create(value?: PartialMessage<DoubleValue>): DoubleValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DoubleValue): DoubleValue;
internalBinaryWrite(message: DoubleValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.DoubleValue
*/
export declare const DoubleValue: DoubleValue$Type;
declare class FloatValue$Type extends MessageType<FloatValue> {
constructor();
/**
* Encode `FloatValue` to JSON number.
*/
internalJsonWrite(message: FloatValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `FloatValue` from JSON number.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: FloatValue): FloatValue;
create(value?: PartialMessage<FloatValue>): FloatValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FloatValue): FloatValue;
internalBinaryWrite(message: FloatValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.FloatValue
*/
export declare const FloatValue: FloatValue$Type;
declare class Int64Value$Type extends MessageType<Int64Value> {
constructor();
/**
* Encode `Int64Value` to JSON string.
*/
internalJsonWrite(message: Int64Value, options: JsonWriteOptions): JsonValue;
/**
* Decode `Int64Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int64Value): Int64Value;
create(value?: PartialMessage<Int64Value>): Int64Value;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int64Value): Int64Value;
internalBinaryWrite(message: Int64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.Int64Value
*/
export declare const Int64Value: Int64Value$Type;
declare class UInt64Value$Type extends MessageType<UInt64Value> {
constructor();
/**
* Encode `UInt64Value` to JSON string.
*/
internalJsonWrite(message: UInt64Value, options: JsonWriteOptions): JsonValue;
/**
* Decode `UInt64Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt64Value): UInt64Value;
create(value?: PartialMessage<UInt64Value>): UInt64Value;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt64Value): UInt64Value;
internalBinaryWrite(message: UInt64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt64Value
*/
export declare const UInt64Value: UInt64Value$Type;
declare class Int32Value$Type extends MessageType<Int32Value> {
constructor();
/**
    * Encode `Int32Value` to JSON number.
*/
internalJsonWrite(message: Int32Value, options: JsonWriteOptions): JsonValue;
/**
    * Decode `Int32Value` from JSON number.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int32Value): Int32Value;
create(value?: PartialMessage<Int32Value>): Int32Value;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int32Value): Int32Value;
internalBinaryWrite(message: Int32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.Int32Value
*/
export declare const Int32Value: Int32Value$Type;
declare class UInt32Value$Type extends MessageType<UInt32Value> {
constructor();
/**
    * Encode `UInt32Value` to JSON number.
*/
internalJsonWrite(message: UInt32Value, options: JsonWriteOptions): JsonValue;
/**
    * Decode `UInt32Value` from JSON number.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt32Value): UInt32Value;
create(value?: PartialMessage<UInt32Value>): UInt32Value;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt32Value): UInt32Value;
internalBinaryWrite(message: UInt32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt32Value
*/
export declare const UInt32Value: UInt32Value$Type;
declare class BoolValue$Type extends MessageType<BoolValue> {
constructor();
/**
* Encode `BoolValue` to JSON bool.
*/
internalJsonWrite(message: BoolValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `BoolValue` from JSON bool.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BoolValue): BoolValue;
create(value?: PartialMessage<BoolValue>): BoolValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BoolValue): BoolValue;
internalBinaryWrite(message: BoolValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.BoolValue
*/
export declare const BoolValue: BoolValue$Type;
declare class StringValue$Type extends MessageType<StringValue> {
constructor();
/**
* Encode `StringValue` to JSON string.
*/
internalJsonWrite(message: StringValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `StringValue` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: StringValue): StringValue;
create(value?: PartialMessage<StringValue>): StringValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StringValue): StringValue;
internalBinaryWrite(message: StringValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.StringValue
*/
export declare const StringValue: StringValue$Type;
declare class BytesValue$Type extends MessageType<BytesValue> {
constructor();
/**
* Encode `BytesValue` to JSON string.
*/
internalJsonWrite(message: BytesValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `BytesValue` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BytesValue): BytesValue;
create(value?: PartialMessage<BytesValue>): BytesValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BytesValue): BytesValue;
internalBinaryWrite(message: BytesValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.BytesValue
*/
export declare const BytesValue: BytesValue$Type;
export {};
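These wrapper declarations exist so that a message can distinguish an absent field from a field set to its default value; note that under this generator configuration (long_type_string) the 64-bit wrappers carry their value as a string. A minimal sketch (same assumed import path as above; the filter values are hypothetical):

import { StringValue, Int64Value } from "@actions/artifact/lib/generated";

// In ListArtifactsRequest, leaving a filter undefined means "no filter",
// while a present wrapper message filters on its value.
const nameFilter = StringValue.create({ value: "my-debug-artifacts" });
const idFilter = Int64Value.create({ value: "12345" }); // int64 value as a string
console.log(nameFilter.value, idFilter.value);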

node_modules/@actions/artifact/lib/generated/google/protobuf/wrappers.js generated vendored Normal file

@@ -0,0 +1,609 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BytesValue = exports.StringValue = exports.BoolValue = exports.UInt32Value = exports.Int32Value = exports.UInt64Value = exports.Int64Value = exports.FloatValue = exports.DoubleValue = void 0;
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "google/protobuf/wrappers.proto" (package "google.protobuf", syntax proto3)
// tslint:disable
//
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//
// Wrappers for primitive (non-message) types. These types are useful
// for embedding primitives in the `google.protobuf.Any` type and for places
// where we need to distinguish between the absence of a primitive
// typed field and its default value.
//
const runtime_1 = require("@protobuf-ts/runtime");
const runtime_2 = require("@protobuf-ts/runtime");
const runtime_3 = require("@protobuf-ts/runtime");
const runtime_4 = require("@protobuf-ts/runtime");
const runtime_5 = require("@protobuf-ts/runtime");
const runtime_6 = require("@protobuf-ts/runtime");
const runtime_7 = require("@protobuf-ts/runtime");
// @generated message type with reflection information, may provide speed optimized methods
class DoubleValue$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.DoubleValue", [
{ no: 1, name: "value", kind: "scalar", T: 1 /*ScalarType.DOUBLE*/ }
]);
}
/**
* Encode `DoubleValue` to JSON number.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(2, message.value, "value", false, true);
}
/**
* Decode `DoubleValue` from JSON number.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 1, undefined, "value");
return target;
}
create(value) {
const message = { value: 0 };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* double value */ 1:
message.value = reader.double();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* double value = 1; */
if (message.value !== 0)
writer.tag(1, runtime_3.WireType.Bit64).double(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.DoubleValue
*/
exports.DoubleValue = new DoubleValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FloatValue$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.FloatValue", [
{ no: 1, name: "value", kind: "scalar", T: 2 /*ScalarType.FLOAT*/ }
]);
}
/**
* Encode `FloatValue` to JSON number.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(1, message.value, "value", false, true);
}
/**
* Decode `FloatValue` from JSON number.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 1, undefined, "value");
return target;
}
create(value) {
const message = { value: 0 };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* float value */ 1:
message.value = reader.float();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* float value = 1; */
if (message.value !== 0)
writer.tag(1, runtime_3.WireType.Bit32).float(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.FloatValue
*/
exports.FloatValue = new FloatValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class Int64Value$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.Int64Value", [
{ no: 1, name: "value", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
/**
* Encode `Int64Value` to JSON string.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(runtime_1.ScalarType.INT64, message.value, "value", false, true);
}
/**
* Decode `Int64Value` from JSON string.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, runtime_1.ScalarType.INT64, runtime_2.LongType.STRING, "value");
return target;
}
create(value) {
const message = { value: "0" };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 value */ 1:
message.value = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* int64 value = 1; */
if (message.value !== "0")
writer.tag(1, runtime_3.WireType.Varint).int64(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Int64Value
*/
exports.Int64Value = new Int64Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class UInt64Value$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.UInt64Value", [
{ no: 1, name: "value", kind: "scalar", T: 4 /*ScalarType.UINT64*/ }
]);
}
/**
* Encode `UInt64Value` to JSON string.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(runtime_1.ScalarType.UINT64, message.value, "value", false, true);
}
/**
* Decode `UInt64Value` from JSON string.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, runtime_1.ScalarType.UINT64, runtime_2.LongType.STRING, "value");
return target;
}
create(value) {
const message = { value: "0" };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* uint64 value */ 1:
message.value = reader.uint64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* uint64 value = 1; */
if (message.value !== "0")
writer.tag(1, runtime_3.WireType.Varint).uint64(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt64Value
*/
exports.UInt64Value = new UInt64Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class Int32Value$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.Int32Value", [
{ no: 1, name: "value", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
]);
}
/**
     * Encode `Int32Value` to JSON number.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(5, message.value, "value", false, true);
}
/**
     * Decode `Int32Value` from JSON number.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 5, undefined, "value");
return target;
}
create(value) {
const message = { value: 0 };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int32 value */ 1:
message.value = reader.int32();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* int32 value = 1; */
if (message.value !== 0)
writer.tag(1, runtime_3.WireType.Varint).int32(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Int32Value
*/
exports.Int32Value = new Int32Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class UInt32Value$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.UInt32Value", [
{ no: 1, name: "value", kind: "scalar", T: 13 /*ScalarType.UINT32*/ }
]);
}
/**
     * Encode `UInt32Value` to JSON number.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(13, message.value, "value", false, true);
}
/**
     * Decode `UInt32Value` from JSON number.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 13, undefined, "value");
return target;
}
create(value) {
const message = { value: 0 };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* uint32 value */ 1:
message.value = reader.uint32();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* uint32 value = 1; */
if (message.value !== 0)
writer.tag(1, runtime_3.WireType.Varint).uint32(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt32Value
*/
exports.UInt32Value = new UInt32Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class BoolValue$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.BoolValue", [
{ no: 1, name: "value", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
]);
}
/**
* Encode `BoolValue` to JSON bool.
*/
internalJsonWrite(message, options) {
return message.value;
}
/**
* Decode `BoolValue` from JSON bool.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 8, undefined, "value");
return target;
}
create(value) {
const message = { value: false };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool value */ 1:
message.value = reader.bool();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool value = 1; */
if (message.value !== false)
writer.tag(1, runtime_3.WireType.Varint).bool(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.BoolValue
*/
exports.BoolValue = new BoolValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class StringValue$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.StringValue", [
{ no: 1, name: "value", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
/**
* Encode `StringValue` to JSON string.
*/
internalJsonWrite(message, options) {
return message.value;
}
/**
* Decode `StringValue` from JSON string.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 9, undefined, "value");
return target;
}
create(value) {
const message = { value: "" };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string value */ 1:
message.value = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* string value = 1; */
if (message.value !== "")
writer.tag(1, runtime_3.WireType.LengthDelimited).string(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.StringValue
*/
exports.StringValue = new StringValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class BytesValue$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.BytesValue", [
{ no: 1, name: "value", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
]);
}
/**
* Encode `BytesValue` to JSON string.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(12, message.value, "value", false, true);
}
/**
* Decode `BytesValue` from JSON string.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 12, undefined, "value");
return target;
}
create(value) {
const message = { value: new Uint8Array(0) };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bytes value */ 1:
message.value = reader.bytes();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bytes value = 1; */
if (message.value.length)
writer.tag(1, runtime_3.WireType.LengthDelimited).bytes(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.BytesValue
*/
exports.BytesValue = new BytesValue$Type();
//# sourceMappingURL=wrappers.js.map
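internalBinaryWrite above skips fields that hold their default value, which is what gives the wrappers their presence semantics on the wire. A round-trip sketch through the standard protobuf-ts toBinary/fromBinary entry points (same assumed import path as above):

import { BoolValue } from "@actions/artifact/lib/generated";

// An all-default message serializes to zero bytes.
console.log(BoolValue.toBinary(BoolValue.create({ value: false })).length); // 0

// A non-default value emits a tag byte plus a varint byte.
const bytes = BoolValue.toBinary(BoolValue.create({ value: true }));
console.log(bytes.length);                      // 2
console.log(BoolValue.fromBinary(bytes).value); // true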

node_modules/@actions/artifact/lib/generated/google/protobuf/wrappers.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long

node_modules/@actions/artifact/lib/generated/index.d.ts generated vendored Normal file

@@ -0,0 +1,4 @@
export * from './google/protobuf/timestamp';
export * from './google/protobuf/wrappers';
export * from './results/api/v1/artifact';
export * from './results/api/v1/artifact.twirp';

node_modules/@actions/artifact/lib/generated/index.js generated vendored Normal file

@@ -0,0 +1,21 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
__exportStar(require("./google/protobuf/timestamp"), exports);
__exportStar(require("./google/protobuf/wrappers"), exports);
__exportStar(require("./results/api/v1/artifact"), exports);
__exportStar(require("./results/api/v1/artifact.twirp"), exports);
//# sourceMappingURL=index.js.map
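The __createBinding helper above is what tsc emits for export * in CommonJS output: instead of copying values, it installs getters so the re-exports stay live. A standalone sketch of the same idea:

// A live binding: the facade reads through to the source module object.
const source = { answer: 42 };
const facade: Record<string, unknown> = {};
Object.defineProperty(facade, "answer", {
    enumerable: true,
    get: () => source.answer,
});
source.answer = 43;
console.log(facade.answer); // 43, because later writes in the source stay visible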

node_modules/@actions/artifact/lib/generated/index.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/generated/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,8DAA2C;AAC3C,6DAA0C;AAC1C,4DAAyC;AACzC,kEAA+C"}

node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.d.ts generated vendored Normal file

@@ -0,0 +1,336 @@
import { ServiceType } from "@protobuf-ts/runtime-rpc";
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { Int64Value } from "../../../google/protobuf/wrappers";
import { StringValue } from "../../../google/protobuf/wrappers";
import { Timestamp } from "../../../google/protobuf/timestamp";
/**
* @generated from protobuf message github.actions.results.api.v1.CreateArtifactRequest
*/
export interface CreateArtifactRequest {
/**
* @generated from protobuf field: string workflow_run_backend_id = 1;
*/
workflowRunBackendId: string;
/**
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
*/
workflowJobRunBackendId: string;
/**
* @generated from protobuf field: string name = 3;
*/
name: string;
/**
* @generated from protobuf field: google.protobuf.Timestamp expires_at = 4;
*/
expiresAt?: Timestamp;
/**
* @generated from protobuf field: int32 version = 5;
*/
version: number;
}
/**
* @generated from protobuf message github.actions.results.api.v1.CreateArtifactResponse
*/
export interface CreateArtifactResponse {
/**
* @generated from protobuf field: bool ok = 1;
*/
ok: boolean;
/**
* @generated from protobuf field: string signed_upload_url = 2;
*/
signedUploadUrl: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.FinalizeArtifactRequest
*/
export interface FinalizeArtifactRequest {
/**
* @generated from protobuf field: string workflow_run_backend_id = 1;
*/
workflowRunBackendId: string;
/**
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
*/
workflowJobRunBackendId: string;
/**
* @generated from protobuf field: string name = 3;
*/
name: string;
/**
* @generated from protobuf field: int64 size = 4;
*/
size: string;
/**
* @generated from protobuf field: google.protobuf.StringValue hash = 5;
*/
hash?: StringValue;
}
/**
* @generated from protobuf message github.actions.results.api.v1.FinalizeArtifactResponse
*/
export interface FinalizeArtifactResponse {
/**
* @generated from protobuf field: bool ok = 1;
*/
ok: boolean;
/**
* @generated from protobuf field: int64 artifact_id = 2;
*/
artifactId: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.ListArtifactsRequest
*/
export interface ListArtifactsRequest {
/**
* The backend plan ID
*
* @generated from protobuf field: string workflow_run_backend_id = 1;
*/
workflowRunBackendId: string;
/**
* The backend job ID
*
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
*/
workflowJobRunBackendId: string;
/**
* Name of the artifact to filter on
*
* @generated from protobuf field: google.protobuf.StringValue name_filter = 3;
*/
nameFilter?: StringValue;
/**
* Monolith Database ID of the artifact to filter on
*
* @generated from protobuf field: google.protobuf.Int64Value id_filter = 4;
*/
idFilter?: Int64Value;
}
/**
* @generated from protobuf message github.actions.results.api.v1.ListArtifactsResponse
*/
export interface ListArtifactsResponse {
/**
* @generated from protobuf field: repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts = 1;
*/
artifacts: ListArtifactsResponse_MonolithArtifact[];
}
/**
* @generated from protobuf message github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact
*/
export interface ListArtifactsResponse_MonolithArtifact {
/**
* The backend plan ID
*
* @generated from protobuf field: string workflow_run_backend_id = 1;
*/
workflowRunBackendId: string;
/**
* The backend job ID
*
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
*/
workflowJobRunBackendId: string;
/**
* Monolith database ID of the artifact
*
* @generated from protobuf field: int64 database_id = 3;
*/
databaseId: string;
/**
* Name of the artifact
*
* @generated from protobuf field: string name = 4;
*/
name: string;
/**
* Size of the artifact in bytes
*
* @generated from protobuf field: int64 size = 5;
*/
size: string;
/**
* When the artifact was created in the monolith
*
* @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
*/
createdAt?: Timestamp;
}
/**
* @generated from protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
*/
export interface GetSignedArtifactURLRequest {
/**
* @generated from protobuf field: string workflow_run_backend_id = 1;
*/
workflowRunBackendId: string;
/**
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
*/
workflowJobRunBackendId: string;
/**
* @generated from protobuf field: string name = 3;
*/
name: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.GetSignedArtifactURLResponse
*/
export interface GetSignedArtifactURLResponse {
/**
* @generated from protobuf field: string signed_url = 1;
*/
signedUrl: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.DeleteArtifactRequest
*/
export interface DeleteArtifactRequest {
/**
* @generated from protobuf field: string workflow_run_backend_id = 1;
*/
workflowRunBackendId: string;
/**
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
*/
workflowJobRunBackendId: string;
/**
* @generated from protobuf field: string name = 3;
*/
name: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.DeleteArtifactResponse
*/
export interface DeleteArtifactResponse {
/**
* @generated from protobuf field: bool ok = 1;
*/
ok: boolean;
/**
* @generated from protobuf field: int64 artifact_id = 2;
*/
artifactId: string;
}
declare class CreateArtifactRequest$Type extends MessageType<CreateArtifactRequest> {
constructor();
create(value?: PartialMessage<CreateArtifactRequest>): CreateArtifactRequest;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateArtifactRequest): CreateArtifactRequest;
internalBinaryWrite(message: CreateArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateArtifactRequest
*/
export declare const CreateArtifactRequest: CreateArtifactRequest$Type;
declare class CreateArtifactResponse$Type extends MessageType<CreateArtifactResponse> {
constructor();
create(value?: PartialMessage<CreateArtifactResponse>): CreateArtifactResponse;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateArtifactResponse): CreateArtifactResponse;
internalBinaryWrite(message: CreateArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateArtifactResponse
*/
export declare const CreateArtifactResponse: CreateArtifactResponse$Type;
declare class FinalizeArtifactRequest$Type extends MessageType<FinalizeArtifactRequest> {
constructor();
create(value?: PartialMessage<FinalizeArtifactRequest>): FinalizeArtifactRequest;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeArtifactRequest): FinalizeArtifactRequest;
internalBinaryWrite(message: FinalizeArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeArtifactRequest
*/
export declare const FinalizeArtifactRequest: FinalizeArtifactRequest$Type;
declare class FinalizeArtifactResponse$Type extends MessageType<FinalizeArtifactResponse> {
constructor();
create(value?: PartialMessage<FinalizeArtifactResponse>): FinalizeArtifactResponse;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeArtifactResponse): FinalizeArtifactResponse;
internalBinaryWrite(message: FinalizeArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeArtifactResponse
*/
export declare const FinalizeArtifactResponse: FinalizeArtifactResponse$Type;
declare class ListArtifactsRequest$Type extends MessageType<ListArtifactsRequest> {
constructor();
create(value?: PartialMessage<ListArtifactsRequest>): ListArtifactsRequest;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsRequest): ListArtifactsRequest;
internalBinaryWrite(message: ListArtifactsRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsRequest
*/
export declare const ListArtifactsRequest: ListArtifactsRequest$Type;
declare class ListArtifactsResponse$Type extends MessageType<ListArtifactsResponse> {
constructor();
create(value?: PartialMessage<ListArtifactsResponse>): ListArtifactsResponse;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsResponse): ListArtifactsResponse;
internalBinaryWrite(message: ListArtifactsResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsResponse
*/
export declare const ListArtifactsResponse: ListArtifactsResponse$Type;
declare class ListArtifactsResponse_MonolithArtifact$Type extends MessageType<ListArtifactsResponse_MonolithArtifact> {
constructor();
create(value?: PartialMessage<ListArtifactsResponse_MonolithArtifact>): ListArtifactsResponse_MonolithArtifact;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsResponse_MonolithArtifact): ListArtifactsResponse_MonolithArtifact;
internalBinaryWrite(message: ListArtifactsResponse_MonolithArtifact, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact
*/
export declare const ListArtifactsResponse_MonolithArtifact: ListArtifactsResponse_MonolithArtifact$Type;
declare class GetSignedArtifactURLRequest$Type extends MessageType<GetSignedArtifactURLRequest> {
constructor();
create(value?: PartialMessage<GetSignedArtifactURLRequest>): GetSignedArtifactURLRequest;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetSignedArtifactURLRequest): GetSignedArtifactURLRequest;
internalBinaryWrite(message: GetSignedArtifactURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
*/
export declare const GetSignedArtifactURLRequest: GetSignedArtifactURLRequest$Type;
declare class GetSignedArtifactURLResponse$Type extends MessageType<GetSignedArtifactURLResponse> {
constructor();
create(value?: PartialMessage<GetSignedArtifactURLResponse>): GetSignedArtifactURLResponse;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetSignedArtifactURLResponse): GetSignedArtifactURLResponse;
internalBinaryWrite(message: GetSignedArtifactURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLResponse
*/
export declare const GetSignedArtifactURLResponse: GetSignedArtifactURLResponse$Type;
declare class DeleteArtifactRequest$Type extends MessageType<DeleteArtifactRequest> {
constructor();
create(value?: PartialMessage<DeleteArtifactRequest>): DeleteArtifactRequest;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteArtifactRequest): DeleteArtifactRequest;
internalBinaryWrite(message: DeleteArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactRequest
*/
export declare const DeleteArtifactRequest: DeleteArtifactRequest$Type;
declare class DeleteArtifactResponse$Type extends MessageType<DeleteArtifactResponse> {
constructor();
create(value?: PartialMessage<DeleteArtifactResponse>): DeleteArtifactResponse;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteArtifactResponse): DeleteArtifactResponse;
internalBinaryWrite(message: DeleteArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactResponse
*/
export declare const DeleteArtifactResponse: DeleteArtifactResponse$Type;
/**
* @generated ServiceType for protobuf service github.actions.results.api.v1.ArtifactService
*/
export declare const ArtifactService: ServiceType;
export {};
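The request/response types above mirror the Twirp ArtifactService that backs the v2 upload path. A sketch of building and serializing a CreateArtifactRequest (the backend IDs and version value are hypothetical; create and toJson are standard protobuf-ts MessageType methods):

import { CreateArtifactRequest, Timestamp } from "@actions/artifact/lib/generated";

const req = CreateArtifactRequest.create({
    workflowRunBackendId: "run-backend-id",    // hypothetical
    workflowJobRunBackendId: "job-backend-id", // hypothetical
    name: "my-debug-artifacts",
    expiresAt: Timestamp.fromDate(new Date(Date.now() + 90 * 24 * 60 * 60 * 1000)),
    version: 4,                                // hypothetical protocol version
});
console.log(CreateArtifactRequest.toJson(req)); // JSON body for the Twirp endpoint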

Some files were not shown because too many files have changed in this diff.