mirror of
https://github.com/github/codeql-action.git
synced 2025-12-12 18:50:12 +08:00
Compare commits
31 Commits
v2.1.16
...
angelapwen
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9cb33846e5 | ||
|
|
7f86ddc26d | ||
|
|
a758ec55e8 | ||
|
|
eeee462f05 | ||
|
|
5895ab0c0b | ||
|
|
44a27e6a51 | ||
|
|
a557279135 | ||
|
|
daaac4306e | ||
|
|
5229df1eef | ||
|
|
5da7870265 | ||
|
|
8a4a573d59 | ||
|
|
6630cbeccb | ||
|
|
af87cc6ba5 | ||
|
|
ebc59ec8da | ||
|
|
52de49c899 | ||
|
|
2c25894c5f | ||
|
|
2746051310 | ||
|
|
1016eba538 | ||
|
|
8171514c02 | ||
|
|
6fa0b7cb22 | ||
|
|
b8bd06e075 | ||
|
|
aa231930c1 | ||
|
|
ba95eeb60e | ||
|
|
c059f95c05 | ||
|
|
75afbf4a30 | ||
|
|
01fa64cb90 | ||
|
|
28ccb035bb | ||
|
|
11111290fc | ||
|
|
d8c9c723a5 | ||
|
|
8d24c9e4c1 | ||
|
|
c8971b8e0a |
@@ -1,5 +1,9 @@
|
|||||||
# CodeQL Action Changelog
|
# CodeQL Action Changelog
|
||||||
|
|
||||||
|
## [UNRELEASED]
|
||||||
|
|
||||||
|
- Update default CodeQL bundle version to 2.10.1. [#1143](https://github.com/github/codeql-action/pull/1143)
|
||||||
|
|
||||||
## 2.1.16 - 13 Jul 2022
|
## 2.1.16 - 13 Jul 2022
|
||||||
|
|
||||||
- You can now quickly debug a job that uses the CodeQL Action by re-running the job from the GitHub UI and selecting the "Enable debug logging" option. [#1132](https://github.com/github/codeql-action/pull/1132)
|
- You can now quickly debug a job that uses the CodeQL Action by re-running the job from the GitHub UI and selecting the "Enable debug logging" option. [#1132](https://github.com/github/codeql-action/pull/1132)
|
||||||
|
|||||||
@@ -74,3 +74,4 @@ outputs:
|
|||||||
runs:
|
runs:
|
||||||
using: "node16"
|
using: "node16"
|
||||||
main: "../lib/analyze-action.js"
|
main: "../lib/analyze-action.js"
|
||||||
|
post: "../lib/analyze-action-post.js"
|
||||||
|
|||||||
@@ -78,3 +78,4 @@ outputs:
|
|||||||
runs:
|
runs:
|
||||||
using: 'node16'
|
using: 'node16'
|
||||||
main: '../lib/init-action.js'
|
main: '../lib/init-action.js'
|
||||||
|
post: '../lib/init-action-post.js'
|
||||||
33
lib/actions-util.js
generated
33
lib/actions-util.js
generated
@@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
exports.printDebugLogs = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const os = __importStar(require("os"));
|
const os = __importStar(require("os"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
@@ -677,8 +677,33 @@ async function isAnalyzingDefaultBranch() {
|
|||||||
return currentRef === defaultBranch;
|
return currentRef === defaultBranch;
|
||||||
}
|
}
|
||||||
exports.isAnalyzingDefaultBranch = isAnalyzingDefaultBranch;
|
exports.isAnalyzingDefaultBranch = isAnalyzingDefaultBranch;
|
||||||
function sanitizeArifactName(name) {
|
async function printDebugLogs(config) {
|
||||||
return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
|
core.info("Debug mode is on. Printing CodeQL debug logs...");
|
||||||
|
for (const language of config.languages) {
|
||||||
|
const databaseDirectory = (0, util_1.getCodeQLDatabasePath)(config, language);
|
||||||
|
const logsDirectory = path.join(databaseDirectory, "log");
|
||||||
|
if (!(0, util_1.doesDirectoryExist)(logsDirectory)) {
|
||||||
|
core.info(`Directory ${logsDirectory} does not exist.`);
|
||||||
|
continue; // Skip this language database.
|
||||||
|
}
|
||||||
|
const walkLogFiles = (dir) => {
|
||||||
|
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
||||||
|
if (entries.length === 0) {
|
||||||
|
core.info(`No debug logs found at directory ${logsDirectory}.`);
|
||||||
|
}
|
||||||
|
for (const entry of entries) {
|
||||||
|
if (entry.isFile()) {
|
||||||
|
core.startGroup(`CodeQL Debug Logs - ${language} - ${entry.name}`);
|
||||||
|
process.stdout.write(fs.readFileSync(path.resolve(dir, entry.name)));
|
||||||
|
core.endGroup();
|
||||||
|
}
|
||||||
|
else if (entry.isDirectory()) {
|
||||||
|
walkLogFiles(path.resolve(dir, entry.name));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
walkLogFiles(logsDirectory);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
exports.sanitizeArifactName = sanitizeArifactName;
|
exports.printDebugLogs = printDebugLogs;
|
||||||
//# sourceMappingURL=actions-util.js.map
|
//# sourceMappingURL=actions-util.js.map
|
||||||
File diff suppressed because one or more lines are too long
6
lib/actions-util.test.js
generated
6
lib/actions-util.test.js
generated
@@ -497,10 +497,4 @@ on: ["push"]
|
|||||||
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
|
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("sanitizeArifactName", (t) => {
|
|
||||||
t.deepEqual(actionsutil.sanitizeArifactName("hello-world_"), "hello-world_");
|
|
||||||
t.deepEqual(actionsutil.sanitizeArifactName("hello`world`"), "helloworld");
|
|
||||||
t.deepEqual(actionsutil.sanitizeArifactName("hello===123"), "hello123");
|
|
||||||
t.deepEqual(actionsutil.sanitizeArifactName("*m)a&n^y%i££n+v!a:l[i]d"), "manyinvalid");
|
|
||||||
});
|
|
||||||
//# sourceMappingURL=actions-util.test.js.map
|
//# sourceMappingURL=actions-util.test.js.map
|
||||||
File diff suppressed because one or more lines are too long
55
lib/analyze-action-post.js
generated
Normal file
55
lib/analyze-action-post.js
generated
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
"use strict";
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
/**
|
||||||
|
* This file is the entry point for the `post:` hook of `analyze-action.yml`.
|
||||||
|
* It will run after the all steps in this job, in reverse order in relation to
|
||||||
|
* other `post:` hooks.
|
||||||
|
*/
|
||||||
|
const core = __importStar(require("@actions/core"));
|
||||||
|
const actionsUtil = __importStar(require("./actions-util"));
|
||||||
|
const config_utils_1 = require("./config-utils");
|
||||||
|
const debugArtifacts = __importStar(require("./debug-artifacts"));
|
||||||
|
const logging_1 = require("./logging");
|
||||||
|
async function run(uploadSarifDebugArtifact) {
|
||||||
|
const logger = (0, logging_1.getActionsLogger)();
|
||||||
|
const config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
|
||||||
|
if (config === undefined) {
|
||||||
|
throw new Error("Config file could not be found at expected location. Did the 'init' action fail to start?");
|
||||||
|
}
|
||||||
|
// Upload Actions SARIF artifacts for debugging
|
||||||
|
if (config === null || config === void 0 ? void 0 : config.debugMode) {
|
||||||
|
core.info("Debug mode is on. Uploading available SARIF files as Actions debugging artifact...");
|
||||||
|
const outputDir = actionsUtil.getRequiredInput("output");
|
||||||
|
await uploadSarifDebugArtifact(config, outputDir);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
async function runWrapper() {
|
||||||
|
try {
|
||||||
|
await run(debugArtifacts.uploadSarifDebugArtifact);
|
||||||
|
}
|
||||||
|
catch (error) {
|
||||||
|
core.setFailed(`analyze action cleanup failed: ${error}`);
|
||||||
|
console.log(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
void runWrapper();
|
||||||
|
//# sourceMappingURL=analyze-action-post.js.map
|
||||||
1
lib/analyze-action-post.js.map
Normal file
1
lib/analyze-action-post.js.map
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"analyze-action-post.js","sourceRoot":"","sources":["../src/analyze-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAA2C;AAC3C,kEAAoD;AACpD,uCAA6C;AAE7C,KAAK,UAAU,GAAG,CAAC,wBAAkC;IACnD,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,IAAI,KAAK,CACb,2FAA2F,CAC5F,CAAC;KACH;IAED,+CAA+C;IAC/C,IAAI,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,SAAS,EAAE;QACrB,IAAI,CAAC,IAAI,CACP,oFAAoF,CACrF,CAAC;QACF,MAAM,SAAS,GAAG,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;QACzD,MAAM,wBAAwB,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;KACnD;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,CAAC,cAAc,CAAC,wBAAwB,CAAC,CAAC;KACpD;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kCAAkC,KAAK,EAAE,CAAC,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||||
3
lib/analyze-action-post.test.js
generated
Normal file
3
lib/analyze-action-post.test.js
generated
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
"use strict";
|
||||||
|
// TODO(angelapwen): Test run() here
|
||||||
|
//# sourceMappingURL=analyze-action-post.test.js.map
|
||||||
1
lib/analyze-action-post.test.js.map
Normal file
1
lib/analyze-action-post.test.js.map
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"analyze-action-post.test.js","sourceRoot":"","sources":["../src/analyze-action-post.test.ts"],"names":[],"mappings":";AAAA,oCAAoC"}
|
||||||
83
lib/analyze-action.js
generated
83
lib/analyze-action.js
generated
@@ -20,9 +20,6 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.runPromise = exports.sendStatusReport = void 0;
|
exports.runPromise = exports.sendStatusReport = void 0;
|
||||||
const fs = __importStar(require("fs"));
|
|
||||||
const path = __importStar(require("path"));
|
|
||||||
const artifact = __importStar(require("@actions/artifact"));
|
|
||||||
const core = __importStar(require("@actions/core"));
|
const core = __importStar(require("@actions/core"));
|
||||||
const actionsUtil = __importStar(require("./actions-util"));
|
const actionsUtil = __importStar(require("./actions-util"));
|
||||||
const analyze_1 = require("./analyze");
|
const analyze_1 = require("./analyze");
|
||||||
@@ -35,7 +32,6 @@ const logging_1 = require("./logging");
|
|||||||
const repository_1 = require("./repository");
|
const repository_1 = require("./repository");
|
||||||
const upload_lib = __importStar(require("./upload-lib"));
|
const upload_lib = __importStar(require("./upload-lib"));
|
||||||
const util = __importStar(require("./util"));
|
const util = __importStar(require("./util"));
|
||||||
const util_1 = require("./util");
|
|
||||||
// eslint-disable-next-line import/no-commonjs
|
// eslint-disable-next-line import/no-commonjs
|
||||||
const pkg = require("../package.json");
|
const pkg = require("../package.json");
|
||||||
async function sendStatusReport(startedAt, config, stats, error) {
|
async function sendStatusReport(startedAt, config, stats, error) {
|
||||||
@@ -83,27 +79,6 @@ async function run() {
|
|||||||
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger, featureFlags);
|
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger, featureFlags);
|
||||||
if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
|
if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
|
||||||
runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger);
|
runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger);
|
||||||
if (config.debugMode) {
|
|
||||||
// Upload the SARIF files as an Actions artifact for debugging
|
|
||||||
await uploadDebugArtifacts(config.languages.map((lang) => path.resolve(outputDir, `${lang}.sarif`)), outputDir, config.debugArtifactName);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
|
||||||
if (config.debugMode) {
|
|
||||||
// Upload the logs as an Actions artifact for debugging
|
|
||||||
let toUpload = [];
|
|
||||||
for (const language of config.languages) {
|
|
||||||
toUpload = toUpload.concat(listFolder(path.resolve(util.getCodeQLDatabasePath(config, language), "log")));
|
|
||||||
}
|
|
||||||
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
|
|
||||||
// Multilanguage tracing: there are additional logs in the root of the cluster
|
|
||||||
toUpload = toUpload.concat(listFolder(path.resolve(config.dbLocation, "log")));
|
|
||||||
}
|
|
||||||
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
|
|
||||||
if (!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING))) {
|
|
||||||
// Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
|
|
||||||
await uploadDebugArtifacts([path.resolve(config.tempDir, "compound-build-tracer.log")], config.tempDir, config.debugArtifactName);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
|
if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
|
||||||
await (0, analyze_1.runCleanup)(config, actionsUtil.getOptionalInput("cleanup-level") || "brutal", logger);
|
await (0, analyze_1.runCleanup)(config, actionsUtil.getOptionalInput("cleanup-level") || "brutal", logger);
|
||||||
@@ -144,42 +119,6 @@ async function run() {
|
|||||||
}
|
}
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
finally {
|
|
||||||
if (config === null || config === void 0 ? void 0 : config.debugMode) {
|
|
||||||
try {
|
|
||||||
// Upload the database bundles as an Actions artifact for debugging
|
|
||||||
const toUpload = [];
|
|
||||||
for (const language of config.languages) {
|
|
||||||
toUpload.push(await (0, util_1.bundleDb)(config, language, await (0, codeql_1.getCodeQL)(config.codeQLCmd), `${config.debugDatabaseName}-${language}`));
|
|
||||||
}
|
|
||||||
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
|
|
||||||
}
|
|
||||||
catch (error) {
|
|
||||||
console.log(`Failed to upload database debug bundles: ${error}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (config === null || config === void 0 ? void 0 : config.debugMode) {
|
|
||||||
core.info("Debug mode is on. Printing CodeQL debug logs...");
|
|
||||||
for (const language of config.languages) {
|
|
||||||
const databaseDirectory = util.getCodeQLDatabasePath(config, language);
|
|
||||||
const logsDirectory = path.join(databaseDirectory, "log");
|
|
||||||
const walkLogFiles = (dir) => {
|
|
||||||
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
|
||||||
for (const entry of entries) {
|
|
||||||
if (entry.isFile()) {
|
|
||||||
core.startGroup(`CodeQL Debug Logs - ${language} - ${entry.name}`);
|
|
||||||
process.stdout.write(fs.readFileSync(path.resolve(dir, entry.name)));
|
|
||||||
core.endGroup();
|
|
||||||
}
|
|
||||||
else if (entry.isDirectory()) {
|
|
||||||
walkLogFiles(path.resolve(dir, entry.name));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
walkLogFiles(logsDirectory);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (runStats && uploadResult) {
|
if (runStats && uploadResult) {
|
||||||
await sendStatusReport(startedAt, config, {
|
await sendStatusReport(startedAt, config, {
|
||||||
...runStats,
|
...runStats,
|
||||||
@@ -193,28 +132,6 @@ async function run() {
|
|||||||
await sendStatusReport(startedAt, config, undefined);
|
await sendStatusReport(startedAt, config, undefined);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
async function uploadDebugArtifacts(toUpload, rootDir, artifactName) {
|
|
||||||
let suffix = "";
|
|
||||||
const matrix = actionsUtil.getRequiredInput("matrix");
|
|
||||||
if (matrix !== undefined && matrix !== "null") {
|
|
||||||
for (const entry of Object.entries(JSON.parse(matrix)).sort())
|
|
||||||
suffix += `-${entry[1]}`;
|
|
||||||
}
|
|
||||||
await artifact.create().uploadArtifact(actionsUtil.sanitizeArifactName(`${artifactName}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir));
|
|
||||||
}
|
|
||||||
function listFolder(dir) {
|
|
||||||
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
|
||||||
let files = [];
|
|
||||||
for (const entry of entries) {
|
|
||||||
if (entry.isFile()) {
|
|
||||||
files.push(path.resolve(dir, entry.name));
|
|
||||||
}
|
|
||||||
else if (entry.isDirectory()) {
|
|
||||||
files = files.concat(listFolder(path.resolve(dir, entry.name)));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return files;
|
|
||||||
}
|
|
||||||
exports.runPromise = run();
|
exports.runPromise = run();
|
||||||
async function runWrapper() {
|
async function runWrapper() {
|
||||||
try {
|
try {
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
3
lib/analyze.js
generated
3
lib/analyze.js
generated
@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.runCleanup = exports.runFinalize = exports.runQueries = exports.createdDBForScannedLanguages = exports.CodeQLAnalysisError = void 0;
|
exports.runCleanup = exports.runFinalize = exports.runQueries = exports.dbIsFinalized = exports.createdDBForScannedLanguages = exports.CodeQLAnalysisError = void 0;
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||||
@@ -96,6 +96,7 @@ function dbIsFinalized(config, language, logger) {
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
exports.dbIsFinalized = dbIsFinalized;
|
||||||
async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger, featureFlags) {
|
async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger, featureFlags) {
|
||||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||||
await createdDBForScannedLanguages(codeql, config, logger, featureFlags);
|
await createdDBForScannedLanguages(codeql, config, logger, featureFlags);
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
152
lib/debug-artifacts.js
generated
Normal file
152
lib/debug-artifacts.js
generated
Normal file
@@ -0,0 +1,152 @@
|
|||||||
|
"use strict";
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.uploadDatabaseBundleDebugArtifact = exports.uploadLogsDebugArtifact = exports.uploadSarifDebugArtifact = exports.uploadDebugArtifacts = exports.sanitizeArifactName = void 0;
|
||||||
|
const fs = __importStar(require("fs"));
|
||||||
|
const path = __importStar(require("path"));
|
||||||
|
const zlib_1 = __importDefault(require("zlib"));
|
||||||
|
const artifact = __importStar(require("@actions/artifact"));
|
||||||
|
const core = __importStar(require("@actions/core"));
|
||||||
|
const adm_zip_1 = __importDefault(require("adm-zip"));
|
||||||
|
const del_1 = __importDefault(require("del"));
|
||||||
|
const actions_util_1 = require("./actions-util");
|
||||||
|
const analyze_1 = require("./analyze");
|
||||||
|
const codeql_1 = require("./codeql");
|
||||||
|
const util_1 = require("./util");
|
||||||
|
function sanitizeArifactName(name) {
|
||||||
|
return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
|
||||||
|
}
|
||||||
|
exports.sanitizeArifactName = sanitizeArifactName;
|
||||||
|
async function uploadDebugArtifacts(toUpload, rootDir, artifactName) {
|
||||||
|
if (toUpload.length === 0) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
let suffix = "";
|
||||||
|
const matrix = (0, actions_util_1.getRequiredInput)("matrix");
|
||||||
|
if (matrix) {
|
||||||
|
try {
|
||||||
|
for (const [, matrixVal] of Object.entries(JSON.parse(matrix)).sort())
|
||||||
|
suffix += `-${matrixVal}`;
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
core.info("Could not parse user-specified `matrix` input into JSON. The debug artifact will not be named with the user's `matrix` input.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await artifact.create().uploadArtifact(sanitizeArifactName(`${artifactName}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir));
|
||||||
|
}
|
||||||
|
exports.uploadDebugArtifacts = uploadDebugArtifacts;
|
||||||
|
async function uploadSarifDebugArtifact(config, outputDir) {
|
||||||
|
if (!(0, util_1.doesDirectoryExist)(outputDir)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
let toUpload = [];
|
||||||
|
for (const lang of config.languages) {
|
||||||
|
const sarifFile = path.resolve(outputDir, `${lang}.sarif`);
|
||||||
|
if (fs.existsSync(sarifFile)) {
|
||||||
|
toUpload = toUpload.concat(sarifFile);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await uploadDebugArtifacts(toUpload, outputDir, config.debugArtifactName);
|
||||||
|
}
|
||||||
|
exports.uploadSarifDebugArtifact = uploadSarifDebugArtifact;
|
||||||
|
async function uploadLogsDebugArtifact(config) {
|
||||||
|
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||||
|
let toUpload = [];
|
||||||
|
for (const language of config.languages) {
|
||||||
|
const databaseDirectory = (0, util_1.getCodeQLDatabasePath)(config, language);
|
||||||
|
const logsDirectory = path.resolve(databaseDirectory, "log");
|
||||||
|
if ((0, util_1.doesDirectoryExist)(logsDirectory)) {
|
||||||
|
toUpload = toUpload.concat((0, util_1.listFolder)(logsDirectory));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
|
||||||
|
// Multilanguage tracing: there are additional logs in the root of the cluster
|
||||||
|
const multiLanguageTracingLogsDirectory = path.resolve(config.dbLocation, "log");
|
||||||
|
if ((0, util_1.doesDirectoryExist)(multiLanguageTracingLogsDirectory)) {
|
||||||
|
toUpload = toUpload.concat((0, util_1.listFolder)(multiLanguageTracingLogsDirectory));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
|
||||||
|
// Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
|
||||||
|
if (!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING))) {
|
||||||
|
const compoundBuildTracerLogDirectory = path.resolve(config.tempDir, "compound-build-tracer.log");
|
||||||
|
if ((0, util_1.doesDirectoryExist)(compoundBuildTracerLogDirectory)) {
|
||||||
|
await uploadDebugArtifacts([compoundBuildTracerLogDirectory], config.tempDir, config.debugArtifactName);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.uploadLogsDebugArtifact = uploadLogsDebugArtifact;
|
||||||
|
/**
|
||||||
|
* If a database has not been finalized, we cannot run the `codeql database bundle`
|
||||||
|
* command in the CLI because it will return an error. Instead we directly zip
|
||||||
|
* all files in the database folder and upload it as an artifact.
|
||||||
|
*/
|
||||||
|
async function uploadPartialDatabaseBundle(config, language) {
|
||||||
|
const databasePath = (0, util_1.getCodeQLDatabasePath)(config, language);
|
||||||
|
const databaseBundlePath = path.resolve(config.dbLocation, `${config.debugDatabaseName}-${language}-partial.zip`);
|
||||||
|
core.info(`${config.debugDatabaseName}-${language} is not finalized. Uploading partial database bundle at ${databaseBundlePath}...`);
|
||||||
|
// See `bundleDb` for explanation behind deleting existing db bundle.
|
||||||
|
if (fs.existsSync(databaseBundlePath)) {
|
||||||
|
await (0, del_1.default)(databaseBundlePath, { force: true });
|
||||||
|
}
|
||||||
|
const zip = new adm_zip_1.default();
|
||||||
|
zip.addLocalFolder(databasePath);
|
||||||
|
zip.writeZip(databaseBundlePath);
|
||||||
|
await uploadDebugArtifacts([databaseBundlePath], config.dbLocation, config.debugArtifactName);
|
||||||
|
}
|
||||||
|
async function uploadPartialDatabaseBundleZlib(config, language) {
|
||||||
|
const databasePath = (0, util_1.getCodeQLDatabasePath)(config, language);
|
||||||
|
const databaseBundlePath = path.resolve(config.dbLocation, `${config.debugDatabaseName}-${language}-partial.gz`);
|
||||||
|
core.info(`${config.debugDatabaseName}-${language} is not finalized. Uploading partial database bundle at ${databaseBundlePath}...`);
|
||||||
|
// See `bundleDb` for explanation behind deleting existing db bundle.
|
||||||
|
if (fs.existsSync(databaseBundlePath)) {
|
||||||
|
await (0, del_1.default)(databaseBundlePath, { force: true });
|
||||||
|
}
|
||||||
|
const gzip = zlib_1.default.createGzip();
|
||||||
|
const outputStream = fs.createWriteStream(databaseBundlePath);
|
||||||
|
// Write all files in database folder to gz location
|
||||||
|
(0, util_1.listFolder)(databasePath).map((file) => {
|
||||||
|
const readStream = fs.createReadStream(file);
|
||||||
|
readStream.pipe(gzip).pipe(outputStream);
|
||||||
|
});
|
||||||
|
await uploadDebugArtifacts([databaseBundlePath], config.dbLocation, config.debugArtifactName);
|
||||||
|
}
|
||||||
|
async function uploadDatabaseBundleDebugArtifact(config, logger) {
|
||||||
|
for (const language of config.languages) {
|
||||||
|
if (!(0, analyze_1.dbIsFinalized)(config, language, logger)) {
|
||||||
|
await uploadPartialDatabaseBundleZlib(config, language);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
// Otherwise run `codeql database bundle` command.
|
||||||
|
const bundlePath = await (0, util_1.bundleDb)(config, language, await (0, codeql_1.getCodeQL)(config.codeQLCmd), `${config.debugDatabaseName}-${language}`);
|
||||||
|
await uploadDebugArtifacts([bundlePath], config.dbLocation, config.debugArtifactName);
|
||||||
|
}
|
||||||
|
catch (error) {
|
||||||
|
core.info(`Failed to upload database debug bundles for ${config.debugDatabaseName}-${language}: ${error}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.uploadDatabaseBundleDebugArtifact = uploadDatabaseBundleDebugArtifact;
|
||||||
|
//# sourceMappingURL=debug-artifacts.js.map
|
||||||
1
lib/debug-artifacts.js.map
Normal file
1
lib/debug-artifacts.js.map
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"debug-artifacts.js","sourceRoot":"","sources":["../src/debug-artifacts.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAC7B,gDAAwB;AAExB,4DAA8C;AAC9C,oDAAsC;AACtC,sDAA6B;AAC7B,8CAAsB;AAEtB,iDAAkD;AAClD,uCAA0C;AAC1C,qCAAiE;AAIjE,iCAMgB;AAEhB,SAAgB,mBAAmB,CAAC,IAAY;IAC9C,OAAO,IAAI,CAAC,OAAO,CAAC,oBAAoB,EAAE,EAAE,CAAC,CAAC;AAChD,CAAC;AAFD,kDAEC;AAEM,KAAK,UAAU,oBAAoB,CACxC,QAAkB,EAClB,OAAe,EACf,YAAoB;IAEpB,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,EAAE;QACzB,OAAO;KACR;IACD,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,QAAQ,CAAC,CAAC;IAC1C,IAAI,MAAM,EAAE;QACV,IAAI;YACF,KAAK,MAAM,CAAC,EAAE,SAAS,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,EAAE;gBACnE,MAAM,IAAI,IAAI,SAAS,EAAE,CAAC;SAC7B;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,IAAI,CACP,+HAA+H,CAChI,CAAC;SACH;KACF;IACD,MAAM,QAAQ,CAAC,MAAM,EAAE,CAAC,cAAc,CACpC,mBAAmB,CAAC,GAAG,YAAY,GAAG,MAAM,EAAE,CAAC,EAC/C,QAAQ,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,EAC5C,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CACxB,CAAC;AACJ,CAAC;AAzBD,oDAyBC;AAEM,KAAK,UAAU,wBAAwB,CAC5C,MAAc,EACd,SAAiB;IAEjB,IAAI,CAAC,IAAA,yBAAkB,EAAC,SAAS,CAAC,EAAE;QAClC,OAAO;KACR;IAED,IAAI,QAAQ,GAAa,EAAE,CAAC;IAC5B,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,SAAS,EAAE;QACnC,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,GAAG,IAAI,QAAQ,CAAC,CAAC;QAC3D,IAAI,EAAE,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YAC5B,QAAQ,GAAG,QAAQ,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;SACvC;KACF;IACD,MAAM,oBAAoB,CAAC,QAAQ,EAAE,SAAS,EAAE,MAAM,CAAC,iBAAiB,CAAC,CAAC;AAC5E,CAAC;AAhBD,4DAgBC;AAEM,KAAK,UAAU,uBAAuB,CAAC,MAAc;IAC1D,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAEjD,IAAI,QAAQ,GAAa,EAAE,CAAC;IAC5B,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,iBAAiB,GAAG,IAAA,4BAAqB,EAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;QAClE,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;QAC7D,IAAI,IAAA,yBAAkB,EAAC,aAAa,CAAC,EAAE;YACrC,QAAQ,GAAG,QAAQ,CAAC,MAAM,CAAC,IAAA,iBAAU,EAAC,aAAa,CAAC,CAAC,CAAC;SACvD;KACF;IAED,IAAI,M
AAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;QAChE,8EAA8E;QAC9E,MAAM,iCAAiC,GAAG,IAAI,CAAC,OAAO,CACpD,MAAM,CAAC,UAAU,EACjB,KAAK,CACN,CAAC;QACF,IAAI,IAAA,yBAAkB,EAAC,iCAAiC,CAAC,EAAE;YACzD,QAAQ,GAAG,QAAQ,CAAC,MAAM,CAAC,IAAA,iBAAU,EAAC,iCAAiC,CAAC,CAAC,CAAC;SAC3E;KACF;IACD,MAAM,oBAAoB,CACxB,QAAQ,EACR,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,iBAAiB,CACzB,CAAC;IAEF,sFAAsF;IACtF,IAAI,CAAC,CAAC,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,CAAC,EAAE;QACnE,MAAM,+BAA+B,GAAG,IAAI,CAAC,OAAO,CAClD,MAAM,CAAC,OAAO,EACd,2BAA2B,CAC5B,CAAC;QACF,IAAI,IAAA,yBAAkB,EAAC,+BAA+B,CAAC,EAAE;YACvD,MAAM,oBAAoB,CACxB,CAAC,+BAA+B,CAAC,EACjC,MAAM,CAAC,OAAO,EACd,MAAM,CAAC,iBAAiB,CACzB,CAAC;SACH;KACF;AACH,CAAC;AA1CD,0DA0CC;AAED;;;;GAIG;AACH,KAAK,UAAU,2BAA2B,CAAC,MAAc,EAAE,QAAkB;IAC3E,MAAM,YAAY,GAAG,IAAA,4BAAqB,EAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IAC7D,MAAM,kBAAkB,GAAG,IAAI,CAAC,OAAO,CACrC,MAAM,CAAC,UAAU,EACjB,GAAG,MAAM,CAAC,iBAAiB,IAAI,QAAQ,cAAc,CACtD,CAAC;IACF,IAAI,CAAC,IAAI,CACP,GAAG,MAAM,CAAC,iBAAiB,IAAI,QAAQ,2DAA2D,kBAAkB,KAAK,CAC1H,CAAC;IACF,qEAAqE;IACrE,IAAI,EAAE,CAAC,UAAU,CAAC,kBAAkB,CAAC,EAAE;QACrC,MAAM,IAAA,aAAG,EAAC,kBAAkB,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;KAChD;IACD,MAAM,GAAG,GAAG,IAAI,iBAAM,EAAE,CAAC;IACzB,GAAG,CAAC,cAAc,CAAC,YAAY,CAAC,CAAC;IACjC,GAAG,CAAC,QAAQ,CAAC,kBAAkB,CAAC,CAAC;IACjC,MAAM,oBAAoB,CACxB,CAAC,kBAAkB,CAAC,EACpB,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,iBAAiB,CACzB,CAAC;AACJ,CAAC;AAED,KAAK,UAAU,+BAA+B,CAC5C,MAAc,EACd,QAAkB;IAElB,MAAM,YAAY,GAAG,IAAA,4BAAqB,EAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IAC7D,MAAM,kBAAkB,GAAG,IAAI,CAAC,OAAO,CACrC,MAAM,CAAC,UAAU,EACjB,GAAG,MAAM,CAAC,iBAAiB,IAAI,QAAQ,aAAa,CACrD,CAAC;IACF,IAAI,CAAC,IAAI,CACP,GAAG,MAAM,CAAC,iBAAiB,IAAI,QAAQ,2DAA2D,kBAAkB,KAAK,CAC1H,CAAC;IACF,qEAAqE;IACrE,IAAI,EAAE,CAAC,UAAU,CAAC,kBAAkB,CAAC,EAAE;QACrC,MAAM,IAAA,aAAG,EAAC,kBAAkB,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;KAChD;IACD,MAAM,IAAI,GAAG,cAAI,CAAC,UAAU,EAAE,CAAC;IAC/B,MAAM,YAAY,GAAG,EAAE,CAAC,iBAAiB,CAAC,kBAAkB,CAAC,CAAC;IAE9D,oDAAoD;IACpD,IAAA,iBAAU,EAAC,YAAY,CAAC,CAAC,GAAG,CAAC,CAAC,IA
AI,EAAE,EAAE;QACpC,MAAM,UAAU,GAAG,EAAE,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC;QAC7C,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;IAC3C,CAAC,CAAC,CAAC;IAEH,MAAM,oBAAoB,CACxB,CAAC,kBAAkB,CAAC,EACpB,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,iBAAiB,CACzB,CAAC;AACJ,CAAC;AAEM,KAAK,UAAU,iCAAiC,CACrD,MAAc,EACd,MAAc;IAEd,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,CAAC,IAAA,uBAAa,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,CAAC,EAAE;YAC5C,MAAM,+BAA+B,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;YACxD,SAAS;SACV;QACD,IAAI;YACF,kDAAkD;YAClD,MAAM,UAAU,GAAG,MAAM,IAAA,eAAQ,EAC/B,MAAM,EACN,QAAQ,EACR,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,EACjC,GAAG,MAAM,CAAC,iBAAiB,IAAI,QAAQ,EAAE,CAC1C,CAAC;YACF,MAAM,oBAAoB,CACxB,CAAC,UAAU,CAAC,EACZ,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,iBAAiB,CACzB,CAAC;SACH;QAAC,OAAO,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CACP,+CAA+C,MAAM,CAAC,iBAAiB,IAAI,QAAQ,KAAK,KAAK,EAAE,CAChG,CAAC;SACH;KACF;AACH,CAAC;AA5BD,8EA4BC"}
|
||||||
34
lib/debug-artifacts.test.js
generated
Normal file
34
lib/debug-artifacts.test.js
generated
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
"use strict";
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const ava_1 = __importDefault(require("ava"));
|
||||||
|
const debugArtifacts = __importStar(require("./debug-artifacts"));
|
||||||
|
(0, ava_1.default)("sanitizeArifactName", (t) => {
|
||||||
|
t.deepEqual(debugArtifacts.sanitizeArifactName("hello-world_"), "hello-world_");
|
||||||
|
t.deepEqual(debugArtifacts.sanitizeArifactName("hello`world`"), "helloworld");
|
||||||
|
t.deepEqual(debugArtifacts.sanitizeArifactName("hello===123"), "hello123");
|
||||||
|
t.deepEqual(debugArtifacts.sanitizeArifactName("*m)a&n^y%i££n+v!a:l[i]d"), "manyinvalid");
|
||||||
|
});
|
||||||
|
// TODO(angelapwen): Test uploadDebugArtifacts if toUpload is empty
|
||||||
|
//# sourceMappingURL=debug-artifacts.test.js.map
|
||||||
1
lib/debug-artifacts.test.js.map
Normal file
1
lib/debug-artifacts.test.js.map
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"debug-artifacts.test.js","sourceRoot":"","sources":["../src/debug-artifacts.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AAEvB,kEAAoD;AAEpD,IAAA,aAAI,EAAC,qBAAqB,EAAE,CAAC,CAAC,EAAE,EAAE;IAChC,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,mBAAmB,CAAC,cAAc,CAAC,EAClD,cAAc,CACf,CAAC;IACF,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,mBAAmB,CAAC,cAAc,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9E,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAAE,UAAU,CAAC,CAAC;IAC3E,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,mBAAmB,CAAC,yBAAyB,CAAC,EAC7D,aAAa,CACd,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,mEAAmE"}
|
||||||
@@ -1,3 +1,3 @@
|
|||||||
{
|
{
|
||||||
"bundleVersion": "codeql-bundle-20220623"
|
"bundleVersion": "codeql-bundle-20220714"
|
||||||
}
|
}
|
||||||
|
|||||||
9
lib/feature-flags.js
generated
9
lib/feature-flags.js
generated
@@ -35,12 +35,17 @@ class GitHubFeatureFlags {
|
|||||||
this.logger = logger;
|
this.logger = logger;
|
||||||
}
|
}
|
||||||
async getValue(flag) {
|
async getValue(flag) {
|
||||||
const response = (await this.getApiResponse())[flag];
|
const response = await this.getApiResponse();
|
||||||
if (response === undefined) {
|
if (response === undefined) {
|
||||||
|
this.logger.debug(`No feature flags API response for ${flag}, considering it disabled.`);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
const flagValue = response[flag];
|
||||||
|
if (flagValue === undefined) {
|
||||||
this.logger.debug(`Feature flag '${flag}' undefined in API response, considering it disabled.`);
|
this.logger.debug(`Feature flag '${flag}' undefined in API response, considering it disabled.`);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
return response;
|
return flagValue;
|
||||||
}
|
}
|
||||||
async getApiResponse() {
|
async getApiResponse() {
|
||||||
const loadApiResponse = async () => {
|
const loadApiResponse = async () => {
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"feature-flags.js","sourceRoot":"","sources":["../src/feature-flags.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,6CAA8D;AAG9D,6CAA+B;AAM/B,IAAY,WAGX;AAHD,WAAY,WAAW;IACrB,qEAAsD,CAAA;IACtD,mEAAoD,CAAA;AACtD,CAAC,EAHW,WAAW,GAAX,mBAAW,KAAX,mBAAW,QAGtB;AAUD,MAAa,kBAAkB;IAG7B,YACU,aAAiC,EACjC,UAA4B,EAC5B,aAA4B,EAC5B,MAAc;QAHd,kBAAa,GAAb,aAAa,CAAoB;QACjC,eAAU,GAAV,UAAU,CAAkB;QAC5B,kBAAa,GAAb,aAAa,CAAe;QAC5B,WAAM,GAAN,MAAM,CAAQ;IACrB,CAAC;IAEJ,KAAK,CAAC,QAAQ,CAAC,IAAiB;QAC9B,MAAM,QAAQ,GAAG,CAAC,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC;QACrD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,iBAAiB,IAAI,uDAAuD,CAC7E,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,OAAO,QAAQ,CAAC;IAClB,CAAC;IAEO,KAAK,CAAC,cAAc;QAC1B,MAAM,eAAe,GAAG,KAAK,IAAI,EAAE;YACjC,iDAAiD;YACjD,IAAI,IAAI,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;gBACzD,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,8DAA8D,CAC/D,CAAC;gBACF,OAAO,EAAE,CAAC;aACX;YACD,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YAC7C,IAAI;gBACF,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,OAAO,CACnC,8DAA8D,EAC9D;oBACE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK;oBAC/B,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI;iBAC9B,CACF,CAAC;gBACF,OAAO,QAAQ,CAAC,IAAI,CAAC;aACtB;YAAC,OAAO,CAAC,EAAE;gBACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;oBAC3C,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,gGAAgG;wBAC9F,oEAAoE;wBACpE,qFAAqF;wBACrF,kFAAkF,CAAC,EAAE,CACxF,CAAC;iBACH;qBAAM;oBACL,uFAAuF;oBACvF,mFAAmF;oBACnF,2FAA2F;oBAC3F,qBAAqB;oBACrB,MAAM,IAAI,KAAK,CACb,4DAA4D,CAAC,EAAE,CAChE,CAAC;iBACH;aACF;QACH,CAAC,CAAC;QAEF,MAAM,WAAW,GAAG,IAAI,CAAC,iBAAiB,IAAI,CAAC,MAAM,eAAe,EAAE,CAAC,CAAC;QACxE,IAAI,CAAC,iBAAiB,GAAG,WAAW,CAAC;QACrC,OAAO,WAAW,CAAC;IACrB,CAAC;CACF;AAhED,gDAgEC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,YAA2B;IAC5D,OAAO;QACL,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE;YACvB,OAAO,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACrC,CAAC;KACF,CAAC;AACJ,CAAC;AAND,gDAMC"}
|
{"version":3,"file":"feature-flags.js","sourceRoot":"","sources":["../src/feature-flags.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,6CAA8D;AAG9D,6CAA+B;AAM/B,IAAY,WAGX;AAHD,WAAY,WAAW;IACrB,qEAAsD,CAAA;IACtD,mEAAoD,CAAA;AACtD,CAAC,EAHW,WAAW,GAAX,mBAAW,KAAX,mBAAW,QAGtB;AAUD,MAAa,kBAAkB;IAG7B,YACU,aAAiC,EACjC,UAA4B,EAC5B,aAA4B,EAC5B,MAAc;QAHd,kBAAa,GAAb,aAAa,CAAoB;QACjC,eAAU,GAAV,UAAU,CAAkB;QAC5B,kBAAa,GAAb,aAAa,CAAe;QAC5B,WAAM,GAAN,MAAM,CAAQ;IACrB,CAAC;IAEJ,KAAK,CAAC,QAAQ,CAAC,IAAiB;QAC9B,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC;QAC7C,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,qCAAqC,IAAI,4BAA4B,CACtE,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,MAAM,SAAS,GAAG,QAAQ,CAAC,IAAI,CAAC,CAAC;QACjC,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,iBAAiB,IAAI,uDAAuD,CAC7E,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;IAEO,KAAK,CAAC,cAAc;QAC1B,MAAM,eAAe,GAAG,KAAK,IAAI,EAAE;YACjC,iDAAiD;YACjD,IAAI,IAAI,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;gBACzD,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,8DAA8D,CAC/D,CAAC;gBACF,OAAO,EAAE,CAAC;aACX;YACD,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YAC7C,IAAI;gBACF,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,OAAO,CACnC,8DAA8D,EAC9D;oBACE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK;oBAC/B,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI;iBAC9B,CACF,CAAC;gBACF,OAAO,QAAQ,CAAC,IAAI,CAAC;aACtB;YAAC,OAAO,CAAC,EAAE;gBACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;oBAC3C,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,gGAAgG;wBAC9F,oEAAoE;wBACpE,qFAAqF;wBACrF,kFAAkF,CAAC,EAAE,CACxF,CAAC;iBACH;qBAAM;oBACL,uFAAuF;oBACvF,mFAAmF;oBACnF,2FAA2F;oBAC3F,qBAAqB;oBACrB,MAAM,IAAI,KAAK,CACb,4DAA4D,CAAC,EAAE,CAChE,CAAC;iBACH;aACF;QACH,CAAC,CAAC;QAEF,MAAM,WAAW,GAAG,IAAI,CAAC,iBAAiB,IAAI,CAAC,MAAM,eAAe,EAAE,CAAC,CAAC;QACxE,IAAI,CAAC,iBAAiB,GAAG,WAAW,CAAC;QACrC,OAAO,WAAW,CAAC;IACrB,CAAC;CACF;AAvED,gDAuEC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,YAA2B;IAC5D,OAAO;QACL,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE;YACvB,OAAO,YAAY,CAAC,QAAQ,CAAC,IAAI,C
AAC,CAAC;QACrC,CAAC;KACF,CAAC;AACJ,CAAC;AAND,gDAMC"}
|
||||||
16
lib/feature-flags.test.js
generated
16
lib/feature-flags.test.js
generated
@@ -40,6 +40,22 @@ for (const variant of ALL_FEATURE_FLAGS_DISABLED_VARIANTS) {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
(0, ava_1.default)("API response missing", async (t) => {
|
||||||
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
|
const loggedMessages = [];
|
||||||
|
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||||
|
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(403, {});
|
||||||
|
for (const flag of Object.values(feature_flags_1.FeatureFlag)) {
|
||||||
|
t.assert((await featureFlags.getValue(flag)) === false);
|
||||||
|
}
|
||||||
|
for (const featureFlag of ["ml_powered_queries_enabled"]) {
|
||||||
|
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||||
|
v.message ===
|
||||||
|
`No feature flags API response for ${featureFlag}, considering it disabled.`) !== undefined);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
(0, ava_1.default)("Feature flags are disabled if they're not returned in API response", async (t) => {
|
(0, ava_1.default)("Feature flags are disabled if they're not returned in API response", async (t) => {
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"feature-flags.test.js","sourceRoot":"","sources":["../src/feature-flags.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAGvB,mDAAkE;AAClE,uCAA4C;AAC5C,6CAAkD;AAClD,mDAMyB;AAEzB,iCAAgF;AAEhF,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;AAC/C,CAAC,CAAC,CAAC;AAEH,MAAM,cAAc,GAAqB;IACvC,IAAI,EAAE,MAAM;IACZ,GAAG,EAAE,oBAAoB;CAC1B,CAAC;AAEF,MAAM,iBAAiB,GAAG,IAAA,+BAAkB,EAAC,gBAAgB,CAAC,CAAC;AAE/D,MAAM,mCAAmC,GAGpC;IACH;QACE,WAAW,EAAE,MAAM;QACnB,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE;KAC9D;IACD,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,EAAE;CACrE,CAAC;AAEF,KAAK,MAAM,OAAO,IAAI,mCAAmC,EAAE;IACzD,IAAA,aAAI,EAAC,qDAAqD,OAAO,CAAC,WAAW,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC3F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;YAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,OAAO,CAAC,aAAa,EACrB,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;YAEF,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;gBAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;aACzD;YAED,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,8DAA8D,CACnE,KAAK,SAAS,CAChB,CAAC;QACJ,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ;AAED,IAAA,aAAI,EAAC,oEAAoE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;QAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;YAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;SACzD;QAED,KAAK,MAAM,WAAW,IAA
I,CAAC,4BAA4B,CAAC,EAAE;YACxD,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,iBAAiB,WAAW,uDAAuD,CACxF,KAAK,SAAS,CAChB,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,iEAAiE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,MAAM,CAAC,CAAC,WAAW,CACjB,KAAK,IAAI,EAAE,CAAC,YAAY,CAAC,QAAQ,CAAC,2BAAW,CAAC,uBAAuB,CAAC,EACtE;YACE,OAAO,EACL,oFAAoF;SACvF,CACF,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,MAAM,aAAa,GAAG;IACpB,4BAA4B;IAC5B,2BAA2B;CAC5B,CAAC;AAEF,KAAK,MAAM,WAAW,IAAI,aAAa,EAAE;IACvC,IAAA,aAAI,EAAC,iBAAiB,WAAW,6CAA6C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC1F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;YAEF,MAAM,oBAAoB,GAAgC,EAAE,CAAC;YAC7D,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE;gBAC7B,oBAAoB,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;aACjC;YACD,oBAAoB,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC;YACzC,IAAA,0CAA0B,EAAC,GAAG,EAAE,oBAAoB,CAAC,CAAC;YAEtD,MAAM,kBAAkB,GAAgC;gBACtD,0BAA0B,EAAE,MAAM,YAAY,CAAC,QAAQ,CACrD,2BAAW,CAAC,uBAAuB,CACpC;gBACD,yBAAyB,EAAE,MAAM,YAAY,CAAC,QAAQ,CACpD,2BAAW,CAAC,sBAAsB,CACnC;aACF,CAAC;YAEF,CAAC,CAAC,SAAS,CAAC,kBAAkB,EAAE,oBAAoB,CAAC,CAAC;QACxD,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ"}
|
{"version":3,"file":"feature-flags.test.js","sourceRoot":"","sources":["../src/feature-flags.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAGvB,mDAAkE;AAClE,uCAA4C;AAC5C,6CAAkD;AAClD,mDAMyB;AAEzB,iCAAgF;AAEhF,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;AAC/C,CAAC,CAAC,CAAC;AAEH,MAAM,cAAc,GAAqB;IACvC,IAAI,EAAE,MAAM;IACZ,GAAG,EAAE,oBAAoB;CAC1B,CAAC;AAEF,MAAM,iBAAiB,GAAG,IAAA,+BAAkB,EAAC,gBAAgB,CAAC,CAAC;AAE/D,MAAM,mCAAmC,GAGpC;IACH;QACE,WAAW,EAAE,MAAM;QACnB,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE;KAC9D;IACD,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,EAAE;CACrE,CAAC;AAEF,KAAK,MAAM,OAAO,IAAI,mCAAmC,EAAE;IACzD,IAAA,aAAI,EAAC,qDAAqD,OAAO,CAAC,WAAW,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC3F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;YAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,OAAO,CAAC,aAAa,EACrB,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;YAEF,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;gBAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;aACzD;YAED,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,8DAA8D,CACnE,KAAK,SAAS,CAChB,CAAC;QACJ,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ;AAED,IAAA,aAAI,EAAC,sBAAsB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvC,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;QAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;YAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;SACzD;QAED,KAAK,MAAM,WAAW,IAA
I,CAAC,4BAA4B,CAAC,EAAE;YACxD,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,qCAAqC,WAAW,4BAA4B,CACjF,KAAK,SAAS,CAChB,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,oEAAoE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;QAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;YAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;SACzD;QAED,KAAK,MAAM,WAAW,IAAI,CAAC,4BAA4B,CAAC,EAAE;YACxD,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,iBAAiB,WAAW,uDAAuD,CACxF,KAAK,SAAS,CAChB,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,iEAAiE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,MAAM,CAAC,CAAC,WAAW,CACjB,KAAK,IAAI,EAAE,CAAC,YAAY,CAAC,QAAQ,CAAC,2BAAW,CAAC,uBAAuB,CAAC,EACtE;YACE,OAAO,EACL,oFAAoF;SACvF,CACF,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,MAAM,aAAa,GAAG;IACpB,4BAA4B;IAC5B,2BAA2B;CAC5B,CAAC;AAEF,KAAK,MAAM,WAAW,IAAI,aAAa,EAAE;IACvC,IAAA,aAAI,EAAC,iBAAiB,WAAW,6CAA6C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC1F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;YAEF,MAAM,oBAAoB,GAAgC,EAAE,CA
AC;YAC7D,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE;gBAC7B,oBAAoB,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;aACjC;YACD,oBAAoB,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC;YACzC,IAAA,0CAA0B,EAAC,GAAG,EAAE,oBAAoB,CAAC,CAAC;YAEtD,MAAM,kBAAkB,GAAgC;gBACtD,0BAA0B,EAAE,MAAM,YAAY,CAAC,QAAQ,CACrD,2BAAW,CAAC,uBAAuB,CACpC;gBACD,yBAAyB,EAAE,MAAM,YAAY,CAAC,QAAQ,CACpD,2BAAW,CAAC,sBAAsB,CACnC;aACF,CAAC;YAEF,CAAC,CAAC,SAAS,CAAC,kBAAkB,EAAE,oBAAoB,CAAC,CAAC;QACxD,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ"}
|
||||||
56
lib/init-action-post.js
generated
Normal file
56
lib/init-action-post.js
generated
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
"use strict";
|
||||||
|
/**
|
||||||
|
* This file is the entry point for the `post:` hook of `init-action.yml`.
|
||||||
|
* It will run after the all steps in this job, in reverse order in relation to
|
||||||
|
* other `post:` hooks.
|
||||||
|
*/
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const core = __importStar(require("@actions/core"));
|
||||||
|
const actionsUtil = __importStar(require("./actions-util"));
|
||||||
|
const config_utils_1 = require("./config-utils");
|
||||||
|
const debugArtifacts = __importStar(require("./debug-artifacts"));
|
||||||
|
const logging_1 = require("./logging");
|
||||||
|
async function run(uploadDatabaseBundleDebugArtifact, uploadLogsDebugArtifact, printDebugLogs) {
|
||||||
|
const logger = (0, logging_1.getActionsLogger)();
|
||||||
|
const config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
|
||||||
|
if (config === undefined) {
|
||||||
|
throw new Error("Config file could not be found at expected location. Did the 'init' action fail to start?");
|
||||||
|
}
|
||||||
|
// Upload appropriate Actions artifacts for debugging
|
||||||
|
if (config === null || config === void 0 ? void 0 : config.debugMode) {
|
||||||
|
core.info("Debug mode is on. Uploading available database bundles and logs as Actions debugging artifacts...");
|
||||||
|
await uploadDatabaseBundleDebugArtifact(config, logger);
|
||||||
|
await uploadLogsDebugArtifact(config);
|
||||||
|
await printDebugLogs(config);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
async function runWrapper() {
|
||||||
|
try {
|
||||||
|
await run(debugArtifacts.uploadDatabaseBundleDebugArtifact, debugArtifacts.uploadLogsDebugArtifact, actionsUtil.printDebugLogs);
|
||||||
|
}
|
||||||
|
catch (error) {
|
||||||
|
core.setFailed(`init action cleanup failed: ${error}`);
|
||||||
|
console.log(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
void runWrapper();
|
||||||
|
//# sourceMappingURL=init-action-post.js.map
|
||||||
1
lib/init-action-post.js.map
Normal file
1
lib/init-action-post.js.map
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"init-action-post.js","sourceRoot":"","sources":["../src/init-action-post.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;AAEH,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAA2C;AAC3C,kEAAoD;AACpD,uCAA6C;AAE7C,KAAK,UAAU,GAAG,CAChB,iCAA2C,EAC3C,uBAAiC,EACjC,cAAwB;IAExB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,IAAI,KAAK,CACb,2FAA2F,CAC5F,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,SAAS,EAAE;QACrB,IAAI,CAAC,IAAI,CACP,mGAAmG,CACpG,CAAC;QACF,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtC,MAAM,cAAc,CAAC,MAAM,CAAC,CAAC;KAC9B;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,CACP,cAAc,CAAC,iCAAiC,EAChD,cAAc,CAAC,uBAAuB,EACtC,WAAW,CAAC,cAAc,CAC3B,CAAC;KACH;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,+BAA+B,KAAK,EAAE,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||||
3
lib/init-action-post.test.js
generated
Normal file
3
lib/init-action-post.test.js
generated
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
"use strict";
|
||||||
|
// TODO(angelapwen): Test run() here.
|
||||||
|
//# sourceMappingURL=init-action-post.test.js.map
|
||||||
1
lib/init-action-post.test.js.map
Normal file
1
lib/init-action-post.test.js.map
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"init-action-post.test.js","sourceRoot":"","sources":["../src/init-action-post.test.ts"],"names":[],"mappings":";AAAA,qCAAqC"}
|
||||||
32
lib/util.js
generated
32
lib/util.js
generated
@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.isInTestMode = exports.checkActionVersion = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isGitHubGhesVersionBelow = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
|
exports.listFolder = exports.doesDirectoryExist = exports.isInTestMode = exports.checkActionVersion = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isGitHubGhesVersionBelow = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const os = __importStar(require("os"));
|
const os = __importStar(require("os"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
@@ -638,4 +638,34 @@ function isInTestMode() {
|
|||||||
return process.env["TEST_MODE"] === "true" || false;
|
return process.env["TEST_MODE"] === "true" || false;
|
||||||
}
|
}
|
||||||
exports.isInTestMode = isInTestMode;
|
exports.isInTestMode = isInTestMode;
|
||||||
|
/*
|
||||||
|
* Returns whether the path in the argument represents an existing directory.
|
||||||
|
*/
|
||||||
|
function doesDirectoryExist(dirPath) {
|
||||||
|
try {
|
||||||
|
const stats = fs.lstatSync(dirPath);
|
||||||
|
return stats.isDirectory();
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.doesDirectoryExist = doesDirectoryExist;
|
||||||
|
/**
|
||||||
|
* Returns a list of files in a given directory.
|
||||||
|
*/
|
||||||
|
function listFolder(dir) {
|
||||||
|
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
||||||
|
let files = [];
|
||||||
|
for (const entry of entries) {
|
||||||
|
if (entry.isFile()) {
|
||||||
|
files.push(path.resolve(dir, entry.name));
|
||||||
|
}
|
||||||
|
else if (entry.isDirectory()) {
|
||||||
|
files = files.concat(listFolder(path.resolve(dir, entry.name)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return files;
|
||||||
|
}
|
||||||
|
exports.listFolder = listFolder;
|
||||||
//# sourceMappingURL=util.js.map
|
//# sourceMappingURL=util.js.map
|
||||||
File diff suppressed because one or more lines are too long
6
lib/util.test.js
generated
6
lib/util.test.js
generated
@@ -315,4 +315,10 @@ for (const [version, githubVersion, shouldReportWarning,] of CHECK_ACTION_VERSIO
|
|||||||
isActionsStub.restore();
|
isActionsStub.restore();
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
// TODO(angelapwen): Test doesDirectoryExist() returns true if directory
|
||||||
|
// TODO(angelapwen): Test doesDirectoryExist() returns false if file
|
||||||
|
// TODO(angelapwen): Test doesDirectoryExist() returns false if no file of this type exists
|
||||||
|
// TODO(angelapwen): Test listFolder() returns files in directory
|
||||||
|
// TODO(angelapwen): Test listFolder() returns empty if not a directory
|
||||||
|
// TODO(angelapwen): Test doesDirectoryExist() returns empty if directory is empty
|
||||||
//# sourceMappingURL=util.test.js.map
|
//# sourceMappingURL=util.test.js.map
|
||||||
File diff suppressed because one or more lines are too long
19
node_modules/.package-lock.json
generated
vendored
19
node_modules/.package-lock.json
generated
vendored
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "codeql",
|
"name": "codeql",
|
||||||
"version": "2.1.16",
|
"version": "2.1.17",
|
||||||
"lockfileVersion": 2,
|
"lockfileVersion": 2,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
@@ -464,6 +464,15 @@
|
|||||||
"integrity": "sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ==",
|
"integrity": "sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
|
"node_modules/@types/adm-zip": {
|
||||||
|
"version": "0.5.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/adm-zip/-/adm-zip-0.5.0.tgz",
|
||||||
|
"integrity": "sha512-FCJBJq9ODsQZUNURo5ILAQueuA8WJhRvuihS3ke2iI25mJlfV2LK8jG2Qj2z2AWg8U0FtWWqBHVRetceLskSaw==",
|
||||||
|
"dev": true,
|
||||||
|
"dependencies": {
|
||||||
|
"@types/node": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@types/color-name": {
|
"node_modules/@types/color-name": {
|
||||||
"version": "1.1.1",
|
"version": "1.1.1",
|
||||||
"integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==",
|
"integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==",
|
||||||
@@ -799,6 +808,14 @@
|
|||||||
"node": ">=0.4.0"
|
"node": ">=0.4.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/adm-zip": {
|
||||||
|
"version": "0.5.9",
|
||||||
|
"resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.9.tgz",
|
||||||
|
"integrity": "sha512-s+3fXLkeeLjZ2kLjCBwQufpI5fuN+kIGBxu6530nVQZGVol0d7Y/M88/xw9HGGUcJjKf8LutN3VPRUBq6N7Ajg==",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/aggregate-error": {
|
"node_modules/aggregate-error": {
|
||||||
"version": "3.0.1",
|
"version": "3.0.1",
|
||||||
"integrity": "sha512-quoaXsZ9/BLNae5yiNoUz+Nhkwz83GhWwtYFglcjEQB2NDHCIpApbqXxIFnm4Pq/Nvhrsq5sYJFyohrrxnTGAA==",
|
"integrity": "sha512-quoaXsZ9/BLNae5yiNoUz+Nhkwz83GhWwtYFglcjEQB2NDHCIpApbqXxIFnm4Pq/Nvhrsq5sYJFyohrrxnTGAA==",
|
||||||
|
|||||||
21
node_modules/@types/adm-zip/LICENSE
generated
vendored
Executable file
21
node_modules/@types/adm-zip/LICENSE
generated
vendored
Executable file
@@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) Microsoft Corporation.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE
|
||||||
16
node_modules/@types/adm-zip/README.md
generated
vendored
Executable file
16
node_modules/@types/adm-zip/README.md
generated
vendored
Executable file
@@ -0,0 +1,16 @@
|
|||||||
|
# Installation
|
||||||
|
> `npm install --save @types/adm-zip`
|
||||||
|
|
||||||
|
# Summary
|
||||||
|
This package contains type definitions for adm-zip (https://github.com/cthackers/adm-zip).
|
||||||
|
|
||||||
|
# Details
|
||||||
|
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/adm-zip.
|
||||||
|
|
||||||
|
### Additional Details
|
||||||
|
* Last updated: Fri, 01 Apr 2022 08:01:43 GMT
|
||||||
|
* Dependencies: [@types/node](https://npmjs.com/package/@types/node)
|
||||||
|
* Global values: none
|
||||||
|
|
||||||
|
# Credits
|
||||||
|
These definitions were written by [John Vilk](https://github.com/jvilk), [Abner Oliveira](https://github.com/abner), [BendingBender](https://github.com/BendingBender), [Matthew Sainsbury](https://github.com/mattsains), and [Lei Nelissen](https://github.com/LeiNelissen).
|
||||||
380
node_modules/@types/adm-zip/index.d.ts
generated
vendored
Executable file
380
node_modules/@types/adm-zip/index.d.ts
generated
vendored
Executable file
@@ -0,0 +1,380 @@
|
|||||||
|
// Type definitions for adm-zip 0.5
|
||||||
|
// Project: https://github.com/cthackers/adm-zip
|
||||||
|
// Definitions by: John Vilk <https://github.com/jvilk>
|
||||||
|
// Abner Oliveira <https://github.com/abner>
|
||||||
|
// BendingBender <https://github.com/BendingBender>
|
||||||
|
// Matthew Sainsbury <https://github.com/mattsains>
|
||||||
|
// Lei Nelissen <https://github.com/LeiNelissen>
|
||||||
|
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||||||
|
|
||||||
|
/// <reference types="node" />
|
||||||
|
|
||||||
|
import * as FS from 'fs';
|
||||||
|
import { Constants } from './util';
|
||||||
|
|
||||||
|
declare class AdmZip {
|
||||||
|
/**
|
||||||
|
* @param fileNameOrRawData If provided, reads an existing archive. Otherwise creates a new, empty archive.
|
||||||
|
* @param options Options when initializing the ZIP file
|
||||||
|
*/
|
||||||
|
constructor(fileNameOrRawData?: string | Buffer, options?: Partial<AdmZip.InitOptions>);
|
||||||
|
/**
|
||||||
|
* Extracts the given entry from the archive and returns the content as a Buffer object
|
||||||
|
* @param entry ZipEntry object or String with the full path of the entry
|
||||||
|
* @param pass Password used for decrypting the file
|
||||||
|
* @return Buffer or Null in case of error
|
||||||
|
*/
|
||||||
|
readFile(entry: string | AdmZip.IZipEntry, pass?: string | Buffer): Buffer | null;
|
||||||
|
/**
|
||||||
|
* Asynchronous `readFile`.
|
||||||
|
* @param entry The full path of the entry or a `IZipEntry` object.
|
||||||
|
* @param callback Called with a `Buffer` or `null` in case of error.
|
||||||
|
*/
|
||||||
|
readFileAsync(entry: string | AdmZip.IZipEntry, callback: (data: Buffer | null, err: string) => void): void;
|
||||||
|
/**
|
||||||
|
* Extracts the given entry from the archive and returns the content as
|
||||||
|
* plain text in the given encoding.
|
||||||
|
* @param entry The full path of the entry or a `IZipEntry` object.
|
||||||
|
* @param encoding If no encoding is specified `"utf8"` is used.
|
||||||
|
*/
|
||||||
|
readAsText(fileName: string | AdmZip.IZipEntry, encoding?: string): string;
|
||||||
|
/**
|
||||||
|
* Asynchronous `readAsText`.
|
||||||
|
* @param entry The full path of the entry or a `IZipEntry` object.
|
||||||
|
* @param callback Called with the resulting string.
|
||||||
|
* @param encoding If no encoding is specified `"utf8"` is used.
|
||||||
|
*/
|
||||||
|
readAsTextAsync(
|
||||||
|
fileName: string | AdmZip.IZipEntry,
|
||||||
|
callback: (data: string, err: string) => void,
|
||||||
|
encoding?: string
|
||||||
|
): void;
|
||||||
|
/**
|
||||||
|
* Remove the entry from the file or the entry and all its nested directories
|
||||||
|
* and files if the given entry is a directory.
|
||||||
|
* @param entry The full path of the entry or a `IZipEntry` object.
|
||||||
|
*/
|
||||||
|
deleteFile(entry: string | AdmZip.IZipEntry): void;
|
||||||
|
/**
|
||||||
|
* Adds a comment to the zip. The zip must be rewritten after
|
||||||
|
* adding the comment.
|
||||||
|
* @param comment Content of the comment.
|
||||||
|
*/
|
||||||
|
addZipComment(comment: string): void;
|
||||||
|
/**
|
||||||
|
* @return The zip comment.
|
||||||
|
*/
|
||||||
|
getZipComment(): string;
|
||||||
|
/**
|
||||||
|
* Adds a comment to a specified file or `IZipEntry`. The zip must be rewritten after
|
||||||
|
* adding the comment.
|
||||||
|
* The comment cannot exceed 65535 characters in length.
|
||||||
|
* @param entry The full path of the entry or a `IZipEntry` object.
|
||||||
|
* @param comment The comment to add to the entry.
|
||||||
|
*/
|
||||||
|
addZipEntryComment(entry: string | AdmZip.IZipEntry, comment: string): void;
|
||||||
|
/**
|
||||||
|
* Returns the comment of the specified entry.
|
||||||
|
* @param entry The full path of the entry or a `IZipEntry` object.
|
||||||
|
* @return The comment of the specified entry.
|
||||||
|
*/
|
||||||
|
getZipEntryComment(entry: string | AdmZip.IZipEntry): string;
|
||||||
|
/**
|
||||||
|
* Updates the content of an existing entry inside the archive. The zip
|
||||||
|
* must be rewritten after updating the content.
|
||||||
|
* @param entry The full path of the entry or a `IZipEntry` object.
|
||||||
|
* @param content The entry's new contents.
|
||||||
|
*/
|
||||||
|
updateFile(entry: string | AdmZip.IZipEntry, content: Buffer): void;
|
||||||
|
/**
|
||||||
|
* Adds a file from the disk to the archive.
|
||||||
|
* @param localPath Path to a file on disk.
|
||||||
|
* @param zipPath Path to a directory in the archive. Defaults to the empty
|
||||||
|
* string.
|
||||||
|
* @param zipName Name for the file.
|
||||||
|
* @param comment Comment to be attached to the file
|
||||||
|
*/
|
||||||
|
addLocalFile(localPath: string, zipPath?: string, zipName?: string, comment?: string): void;
|
||||||
|
/**
|
||||||
|
* Adds a local directory and all its nested files and directories to the
|
||||||
|
* archive.
|
||||||
|
* @param localPath Path to a folder on disk.
|
||||||
|
* @param zipPath Path to a folder in the archive. Default: `""`.
|
||||||
|
* @param filter RegExp or Function if files match will be included.
|
||||||
|
*/
|
||||||
|
addLocalFolder(localPath: string, zipPath?: string, filter?: RegExp | ((filename: string) => boolean)): void;
|
||||||
|
/**
|
||||||
|
* Asynchronous addLocalFile
|
||||||
|
* @param localPath
|
||||||
|
* @param callback
|
||||||
|
* @param zipPath optional path inside zip
|
||||||
|
* @param filter optional RegExp or Function if files match will
|
||||||
|
* be included.
|
||||||
|
*/
|
||||||
|
addLocalFolderAsync(
|
||||||
|
localPath: string,
|
||||||
|
callback: (success?: boolean, err?: string) => void,
|
||||||
|
zipPath?: string,
|
||||||
|
filter?: RegExp | ((filename: string) => boolean)
|
||||||
|
): void;
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
* @param localPath - path where files will be extracted
|
||||||
|
* @param props - optional properties
|
||||||
|
* @param props.zipPath - optional path inside zip
|
||||||
|
* @param props.filter - RegExp or Function if files match will be included.
|
||||||
|
*/
|
||||||
|
addLocalFolderPromise(
|
||||||
|
localPath: string,
|
||||||
|
props: { zipPath?: string, filter?: RegExp | ((filename: string) => boolean) }
|
||||||
|
): Promise<void>;
|
||||||
|
/**
|
||||||
|
* Allows you to create a entry (file or directory) in the zip file.
|
||||||
|
* If you want to create a directory the `entryName` must end in `"/"` and a `null`
|
||||||
|
* buffer should be provided.
|
||||||
|
* @param entryName Entry path.
|
||||||
|
* @param content Content to add to the entry; must be a 0-length buffer
|
||||||
|
* for a directory.
|
||||||
|
* @param comment Comment to add to the entry.
|
||||||
|
* @param attr Attribute to add to the entry.
|
||||||
|
*/
|
||||||
|
addFile(entryName: string, content: Buffer, comment?: string, attr?: number): void;
|
||||||
|
/**
|
||||||
|
* Returns an array of `IZipEntry` objects representing the files and folders
|
||||||
|
* inside the archive.
|
||||||
|
*/
|
||||||
|
getEntries(): AdmZip.IZipEntry[];
|
||||||
|
/**
|
||||||
|
* Returns a `IZipEntry` object representing the file or folder specified by `name`.
|
||||||
|
* @param name Name of the file or folder to retrieve.
|
||||||
|
* @return The entry corresponding to the `name`.
|
||||||
|
*/
|
||||||
|
getEntry(name: string): AdmZip.IZipEntry | null;
|
||||||
|
/**
|
||||||
|
* Returns the number of entries in the ZIP
|
||||||
|
* @return The amount of entries in the ZIP
|
||||||
|
*/
|
||||||
|
getEntryCount(): number;
|
||||||
|
/**
|
||||||
|
* Loop through each entry in the ZIP
|
||||||
|
* @param callback The callback that receives each individual entry
|
||||||
|
*/
|
||||||
|
forEach(callback: (entry: AdmZip.IZipEntry) => void): void;
|
||||||
|
/**
|
||||||
|
* Extracts the given entry to the given `targetPath`.
|
||||||
|
* If the entry is a directory inside the archive, the entire directory and
|
||||||
|
* its subdirectories will be extracted.
|
||||||
|
* @param entry The full path of the entry or a `IZipEntry` object.
|
||||||
|
* @param targetPath Target folder where to write the file.
|
||||||
|
* @param maintainEntryPath If maintainEntryPath is `true` and the entry is
|
||||||
|
* inside a folder, the entry folder will be created in `targetPath` as
|
||||||
|
* well. Default: `true`.
|
||||||
|
* @param overwrite If the file already exists at the target path, the file
|
||||||
|
* will be overwriten if this is `true`. Default: `false`.
|
||||||
|
* @param keepOriginalPermission The file will be set as the permission from
|
||||||
|
* the entry if this is true. Default: `false`.
|
||||||
|
* @param outFileName String If set will override the filename of the
|
||||||
|
* extracted file (Only works if the entry is a file)
|
||||||
|
* @return Boolean
|
||||||
|
*/
|
||||||
|
extractEntryTo(
|
||||||
|
entryPath: string | AdmZip.IZipEntry,
|
||||||
|
targetPath: string,
|
||||||
|
maintainEntryPath?: boolean,
|
||||||
|
overwrite?: boolean,
|
||||||
|
keepOriginalPermission?: boolean,
|
||||||
|
outFileName?: string,
|
||||||
|
): boolean;
|
||||||
|
/**
|
||||||
|
* Test the archive
|
||||||
|
* @param password The password for the archive
|
||||||
|
*/
|
||||||
|
test(password?: string | Buffer): boolean;
|
||||||
|
/**
|
||||||
|
* Extracts the entire archive to the given location.
|
||||||
|
* @param targetPath Target location.
|
||||||
|
* @param overwrite If the file already exists at the target path, the file
|
||||||
|
* will be overwriten if this is `true`. Default: `false`.
|
||||||
|
* @param keepOriginalPermission The file will be set as the permission from
|
||||||
|
* the entry if this is true. Default: `false`.
|
||||||
|
* @param password The password for the archive
|
||||||
|
*/
|
||||||
|
extractAllTo(
|
||||||
|
targetPath: string,
|
||||||
|
overwrite?: boolean,
|
||||||
|
keepOriginalPermission?: boolean,
|
||||||
|
password?: string | Buffer
|
||||||
|
): void;
|
||||||
|
/**
|
||||||
|
* Extracts the entire archive to the given location.
|
||||||
|
* @param targetPath Target location.
|
||||||
|
* @param overwrite If the file already exists at the target path, the file
|
||||||
|
* will be overwriten if this is `true`. Default: `false`.
|
||||||
|
* @param keepOriginalPermission The file will be set as the permission from
|
||||||
|
* the entry if this is true. Default: `false`.
|
||||||
|
* @param callback The callback function will be called after extraction.
|
||||||
|
*/
|
||||||
|
extractAllToAsync(
|
||||||
|
targetPath: string,
|
||||||
|
overwrite?: boolean,
|
||||||
|
keepOriginalPermission?: boolean,
|
||||||
|
callback?: (error?: Error) => void,
|
||||||
|
): void;
|
||||||
|
/**
|
||||||
|
* Writes the newly created zip file to disk at the specified location or
|
||||||
|
* if a zip was opened and no `targetFileName` is provided, it will
|
||||||
|
* overwrite the opened zip.
|
||||||
|
*/
|
||||||
|
writeZip(targetFileName?: string, callback?: (error: Error | null) => void): void;
|
||||||
|
/**
|
||||||
|
* Writes the newly created zip file to disk at the specified location or
|
||||||
|
* if a zip was opened and no `targetFileName` is provided, it will
|
||||||
|
* overwrite the opened zip.
|
||||||
|
*/
|
||||||
|
writeZipPromise(
|
||||||
|
targetFileName?: string,
|
||||||
|
props?: { overwrite?: boolean, perm?: number }
|
||||||
|
): Promise<boolean>;
|
||||||
|
/**
|
||||||
|
* Returns the content of the entire zip file.
|
||||||
|
*/
|
||||||
|
toBuffer(): Buffer;
|
||||||
|
/**
|
||||||
|
* Asynchronously returns the content of the entire zip file.
|
||||||
|
* @param onSuccess called with the content of the zip file, once it has been generated.
|
||||||
|
* @param onFail unused.
|
||||||
|
* @param onItemStart called before an entry is compressed.
|
||||||
|
* @param onItemEnd called after an entry is compressed.
|
||||||
|
*/
|
||||||
|
toBuffer(
|
||||||
|
onSuccess: (buffer: Buffer) => void,
|
||||||
|
onFail?: (...args: any[]) => void,
|
||||||
|
onItemStart?: (name: string) => void,
|
||||||
|
onItemEnd?: (name: string) => void,
|
||||||
|
): void;
|
||||||
|
/**
|
||||||
|
* Asynchronously convert the promise to a Buffer
|
||||||
|
*/
|
||||||
|
toBufferPromise(): Promise<Buffer>;
|
||||||
|
}
|
||||||
|
|
||||||
|
declare namespace AdmZip {
|
||||||
|
/**
|
||||||
|
* The `IZipEntry` is more than a structure representing the entry inside the
|
||||||
|
* zip file. Beside the normal attributes and headers a entry can have, the
|
||||||
|
* class contains a reference to the part of the file where the compressed
|
||||||
|
* data resides and decompresses it when requested. It also compresses the
|
||||||
|
* data and creates the headers required to write in the zip file.
|
||||||
|
*/
|
||||||
|
// disable warning about the I-prefix in interface name to prevent breaking stuff for users without a major bump
|
||||||
|
// tslint:disable-next-line:interface-name
|
||||||
|
interface IZipEntry {
|
||||||
|
/**
|
||||||
|
* Represents the full name and path of the file
|
||||||
|
*/
|
||||||
|
entryName: string;
|
||||||
|
readonly rawEntryName: Buffer;
|
||||||
|
/**
|
||||||
|
* Extra data associated with this entry.
|
||||||
|
*/
|
||||||
|
extra: Buffer;
|
||||||
|
/**
|
||||||
|
* Entry comment.
|
||||||
|
*/
|
||||||
|
comment: string;
|
||||||
|
readonly name: string;
|
||||||
|
/**
|
||||||
|
* Read-Only property that indicates the type of the entry.
|
||||||
|
*/
|
||||||
|
readonly isDirectory: boolean;
|
||||||
|
/**
|
||||||
|
* Get the header associated with this ZipEntry.
|
||||||
|
*/
|
||||||
|
readonly header: EntryHeader;
|
||||||
|
attr: number;
|
||||||
|
/**
|
||||||
|
* Retrieve the compressed data for this entry. Note that this may trigger
|
||||||
|
* compression if any properties were modified.
|
||||||
|
*/
|
||||||
|
getCompressedData(): Buffer;
|
||||||
|
/**
|
||||||
|
* Asynchronously retrieve the compressed data for this entry. Note that
|
||||||
|
* this may trigger compression if any properties were modified.
|
||||||
|
*/
|
||||||
|
getCompressedDataAsync(callback: (data: Buffer) => void): void;
|
||||||
|
/**
|
||||||
|
* Set the (uncompressed) data to be associated with this entry.
|
||||||
|
*/
|
||||||
|
setData(value: string | Buffer): void;
|
||||||
|
/**
|
||||||
|
* Get the decompressed data associated with this entry.
|
||||||
|
*/
|
||||||
|
getData(): Buffer;
|
||||||
|
/**
|
||||||
|
* Asynchronously get the decompressed data associated with this entry.
|
||||||
|
*/
|
||||||
|
getDataAsync(callback: (data: Buffer, err: string) => void): void;
|
||||||
|
/**
|
||||||
|
* Returns the CEN Entry Header to be written to the output zip file, plus
|
||||||
|
* the extra data and the entry comment.
|
||||||
|
*/
|
||||||
|
packHeader(): Buffer;
|
||||||
|
/**
|
||||||
|
* Returns a nicely formatted string with the most important properties of
|
||||||
|
* the ZipEntry.
|
||||||
|
*/
|
||||||
|
toString(): string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface EntryHeader {
|
||||||
|
made: number;
|
||||||
|
version: number;
|
||||||
|
flags: number;
|
||||||
|
method: number;
|
||||||
|
time: Date;
|
||||||
|
crc: number;
|
||||||
|
compressedSize: number;
|
||||||
|
size: number;
|
||||||
|
fileNameLength: number;
|
||||||
|
extraLength: number;
|
||||||
|
commentLength: number;
|
||||||
|
diskNumStart: number;
|
||||||
|
inAttr: number;
|
||||||
|
attr: number;
|
||||||
|
offset: number;
|
||||||
|
readonly encripted: boolean;
|
||||||
|
readonly entryHeaderSize: number;
|
||||||
|
readonly realDataOffset: number;
|
||||||
|
readonly dataHeader: DataHeader;
|
||||||
|
loadDataHeaderFromBinary(data: Buffer): void;
|
||||||
|
loadFromBinary(data: Buffer): void;
|
||||||
|
dataHeaderToBinary(): Buffer;
|
||||||
|
entryHeaderToBinary(): Buffer;
|
||||||
|
toString(): string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface DataHeader {
|
||||||
|
version: number;
|
||||||
|
flags: number;
|
||||||
|
method: number;
|
||||||
|
time: number;
|
||||||
|
crc: number;
|
||||||
|
compressedSize: number;
|
||||||
|
size: number;
|
||||||
|
fnameLen: number;
|
||||||
|
extraLen: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface InitOptions {
|
||||||
|
/* If true it disables files sorting */
|
||||||
|
noSort: boolean;
|
||||||
|
/* Read entries during load (initial loading may be slower) */
|
||||||
|
readEntries: boolean;
|
||||||
|
/* Read method */
|
||||||
|
method: typeof Constants[keyof typeof Constants] | number;
|
||||||
|
/* file system */
|
||||||
|
fs: null | typeof FS;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export = AdmZip;
|
||||||
47
node_modules/@types/adm-zip/package.json
generated
vendored
Executable file
47
node_modules/@types/adm-zip/package.json
generated
vendored
Executable file
@@ -0,0 +1,47 @@
|
|||||||
|
{
|
||||||
|
"name": "@types/adm-zip",
|
||||||
|
"version": "0.5.0",
|
||||||
|
"description": "TypeScript definitions for adm-zip",
|
||||||
|
"homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/adm-zip",
|
||||||
|
"license": "MIT",
|
||||||
|
"contributors": [
|
||||||
|
{
|
||||||
|
"name": "John Vilk",
|
||||||
|
"url": "https://github.com/jvilk",
|
||||||
|
"githubUsername": "jvilk"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Abner Oliveira",
|
||||||
|
"url": "https://github.com/abner",
|
||||||
|
"githubUsername": "abner"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "BendingBender",
|
||||||
|
"url": "https://github.com/BendingBender",
|
||||||
|
"githubUsername": "BendingBender"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Matthew Sainsbury",
|
||||||
|
"url": "https://github.com/mattsains",
|
||||||
|
"githubUsername": "mattsains"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Lei Nelissen",
|
||||||
|
"url": "https://github.com/LeiNelissen",
|
||||||
|
"githubUsername": "LeiNelissen"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"main": "",
|
||||||
|
"types": "index.d.ts",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git",
|
||||||
|
"directory": "types/adm-zip"
|
||||||
|
},
|
||||||
|
"scripts": {},
|
||||||
|
"dependencies": {
|
||||||
|
"@types/node": "*"
|
||||||
|
},
|
||||||
|
"typesPublisherContentHash": "e7f9407982926b4743453eb7e13bc93b66f3e599200f0d101fe305016af41e50",
|
||||||
|
"typeScriptVersion": "3.9"
|
||||||
|
}
|
||||||
142
node_modules/@types/adm-zip/util.d.ts
generated
vendored
Executable file
142
node_modules/@types/adm-zip/util.d.ts
generated
vendored
Executable file
@@ -0,0 +1,142 @@
|
|||||||
|
export const Constants: {
|
||||||
|
/* The local file header */
|
||||||
|
LOCHDR: 30; // LOC header size
|
||||||
|
LOCSIG: 0x04034b50; // "PK\003\004"
|
||||||
|
LOCVER: 4; // version needed to extract
|
||||||
|
LOCFLG: 6; // general purpose bit flag
|
||||||
|
LOCHOW: 8; // compression method
|
||||||
|
LOCTIM: 10; // modification time (2 bytes time, 2 bytes date)
|
||||||
|
LOCCRC: 14; // uncompressed file crc-32 value
|
||||||
|
LOCSIZ: 18; // compressed size
|
||||||
|
LOCLEN: 22; // uncompressed size
|
||||||
|
LOCNAM: 26; // filename length
|
||||||
|
LOCEXT: 28; // extra field length
|
||||||
|
|
||||||
|
/* The Data descriptor */
|
||||||
|
EXTSIG: 0x08074b50; // "PK\007\008"
|
||||||
|
EXTHDR: 16; // EXT header size
|
||||||
|
EXTCRC: 4; // uncompressed file crc-32 value
|
||||||
|
EXTSIZ: 8; // compressed size
|
||||||
|
EXTLEN: 12; // uncompressed size
|
||||||
|
|
||||||
|
/* The central directory file header */
|
||||||
|
CENHDR: 46; // CEN header size
|
||||||
|
CENSIG: 0x02014b50; // "PK\001\002"
|
||||||
|
CENVEM: 4; // version made by
|
||||||
|
CENVER: 6; // version needed to extract
|
||||||
|
CENFLG: 8; // encrypt, decrypt flags
|
||||||
|
CENHOW: 10; // compression method
|
||||||
|
CENTIM: 12; // modification time (2 bytes time, 2 bytes date)
|
||||||
|
CENCRC: 16; // uncompressed file crc-32 value
|
||||||
|
CENSIZ: 20; // compressed size
|
||||||
|
CENLEN: 24; // uncompressed size
|
||||||
|
CENNAM: 28; // filename length
|
||||||
|
CENEXT: 30; // extra field length
|
||||||
|
CENCOM: 32; // file comment length
|
||||||
|
CENDSK: 34; // volume number start
|
||||||
|
CENATT: 36; // internal file attributes
|
||||||
|
CENATX: 38; // external file attributes (host system dependent)
|
||||||
|
CENOFF: 42; // LOC header offset
|
||||||
|
|
||||||
|
/* The entries in the end of central directory */
|
||||||
|
ENDHDR: 22; // END header size
|
||||||
|
ENDSIG: 0x06054b50; // "PK\005\006"
|
||||||
|
ENDSUB: 8; // number of entries on this disk
|
||||||
|
ENDTOT: 10; // total number of entries
|
||||||
|
ENDSIZ: 12; // central directory size in bytes
|
||||||
|
ENDOFF: 16; // offset of first CEN header
|
||||||
|
ENDCOM: 20; // zip file comment length
|
||||||
|
|
||||||
|
END64HDR: 20; // zip64 END header size
|
||||||
|
END64SIG: 0x07064b50; // zip64 Locator signature, "PK\006\007"
|
||||||
|
END64START: 4; // number of the disk with the start of the zip64
|
||||||
|
END64OFF: 8; // relative offset of the zip64 end of central directory
|
||||||
|
END64NUMDISKS: 16; // total number of disks
|
||||||
|
|
||||||
|
ZIP64SIG: 0x06064b50; // zip64 signature, "PK\006\006"
|
||||||
|
ZIP64HDR: 56; // zip64 record minimum size
|
||||||
|
ZIP64LEAD: 12; // leading bytes at the start of the record, not counted by the value stored in ZIP64SIZE
|
||||||
|
ZIP64SIZE: 4; // zip64 size of the central directory record
|
||||||
|
ZIP64VEM: 12; // zip64 version made by
|
||||||
|
ZIP64VER: 14; // zip64 version needed to extract
|
||||||
|
ZIP64DSK: 16; // zip64 number of this disk
|
||||||
|
ZIP64DSKDIR: 20; // number of the disk with the start of the record directory
|
||||||
|
ZIP64SUB: 24; // number of entries on this disk
|
||||||
|
ZIP64TOT: 32; // total number of entries
|
||||||
|
ZIP64SIZB: 40; // zip64 central directory size in bytes
|
||||||
|
ZIP64OFF: 48; // offset of start of central directory with respect to the starting disk number
|
||||||
|
ZIP64EXTRA: 56; // extensible data sector
|
||||||
|
|
||||||
|
/* Compression methods */
|
||||||
|
STORED: 0; // no compression
|
||||||
|
SHRUNK: 1; // shrunk
|
||||||
|
REDUCED1: 2; // reduced with compression factor 1
|
||||||
|
REDUCED2: 3; // reduced with compression factor 2
|
||||||
|
REDUCED3: 4; // reduced with compression factor 3
|
||||||
|
REDUCED4: 5; // reduced with compression factor 4
|
||||||
|
IMPLODED: 6; // imploded
|
||||||
|
// 7 reserved for Tokenizing compression algorithm
|
||||||
|
DEFLATED: 8; // deflated
|
||||||
|
ENHANCED_DEFLATED: 9; // enhanced deflated
|
||||||
|
PKWARE: 10; // PKWare DCL imploded
|
||||||
|
// 11 reserved by PKWARE
|
||||||
|
BZIP2: 12; // compressed using BZIP2
|
||||||
|
// 13 reserved by PKWARE
|
||||||
|
LZMA: 14; // LZMA
|
||||||
|
// 15-17 reserved by PKWARE
|
||||||
|
IBM_TERSE: 18; // compressed using IBM TERSE
|
||||||
|
IBM_LZ77: 19; // IBM LZ77 z
|
||||||
|
AES_ENCRYPT: 99; // WinZIP AES encryption method
|
||||||
|
|
||||||
|
/* General purpose bit flag */
|
||||||
|
// values can obtained with expression 2**bitnr
|
||||||
|
FLG_ENC: 1; // Bit 0: encrypted file
|
||||||
|
FLG_COMP1: 2; // Bit 1, compression option
|
||||||
|
FLG_COMP2: 4; // Bit 2, compression option
|
||||||
|
FLG_DESC: 8; // Bit 3, data descriptor
|
||||||
|
FLG_ENH: 16; // Bit 4, enhanced deflating
|
||||||
|
FLG_PATCH: 32; // Bit 5, indicates that the file is compressed patched data.
|
||||||
|
FLG_STR: 64; // Bit 6, strong encryption (patented)
|
||||||
|
// Bits 7-10: Currently unused.
|
||||||
|
FLG_EFS: 2048; // Bit 11: Language encoding flag (EFS)
|
||||||
|
// Bit 12: Reserved by PKWARE for enhanced compression.
|
||||||
|
// Bit 13: encrypted the Central Directory (patented).
|
||||||
|
// Bits 14-15: Reserved by PKWARE.
|
||||||
|
FLG_MSK: 4096; // mask header values
|
||||||
|
|
||||||
|
/* Load type */
|
||||||
|
FILE: 2;
|
||||||
|
BUFFER: 1;
|
||||||
|
NONE: 0;
|
||||||
|
|
||||||
|
/* 4.5 Extensible data fields */
|
||||||
|
EF_ID: 0;
|
||||||
|
EF_SIZE: 2;
|
||||||
|
|
||||||
|
/* Header IDs */
|
||||||
|
ID_ZIP64: 0x0001;
|
||||||
|
ID_AVINFO: 0x0007;
|
||||||
|
ID_PFS: 0x0008;
|
||||||
|
ID_OS2: 0x0009;
|
||||||
|
ID_NTFS: 0x000a;
|
||||||
|
ID_OPENVMS: 0x000c;
|
||||||
|
ID_UNIX: 0x000d;
|
||||||
|
ID_FORK: 0x000e;
|
||||||
|
ID_PATCH: 0x000f;
|
||||||
|
ID_X509_PKCS7: 0x0014;
|
||||||
|
ID_X509_CERTID_F: 0x0015;
|
||||||
|
ID_X509_CERTID_C: 0x0016;
|
||||||
|
ID_STRONGENC: 0x0017;
|
||||||
|
ID_RECORD_MGT: 0x0018;
|
||||||
|
ID_X509_PKCS7_RL: 0x0019;
|
||||||
|
ID_IBM1: 0x0065;
|
||||||
|
ID_IBM2: 0x0066;
|
||||||
|
ID_POSZIP: 0x4690;
|
||||||
|
|
||||||
|
EF_ZIP64_OR_32: 0xffffffff;
|
||||||
|
EF_ZIP64_OR_16: 0xffff;
|
||||||
|
EF_ZIP64_SUNCOMP: 0;
|
||||||
|
EF_ZIP64_SCOMP: 8;
|
||||||
|
EF_ZIP64_RHO: 16;
|
||||||
|
EF_ZIP64_DSN: 24;
|
||||||
|
};
|
||||||
21
node_modules/adm-zip/LICENSE
generated
vendored
Normal file
21
node_modules/adm-zip/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2012 Another-D-Mention Software and other contributors
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
65
node_modules/adm-zip/README.md
generated
vendored
Normal file
65
node_modules/adm-zip/README.md
generated
vendored
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
# ADM-ZIP for NodeJS with added support for electron original-fs
|
||||||
|
|
||||||
|
ADM-ZIP is a pure JavaScript implementation for zip data compression for [NodeJS](https://nodejs.org/).
|
||||||
|
|
||||||
|
# Installation
|
||||||
|
|
||||||
|
With [npm](https://www.npmjs.com/) do:
|
||||||
|
|
||||||
|
$ npm install adm-zip
|
||||||
|
|
||||||
|
## What is it good for?
|
||||||
|
|
||||||
|
The library allows you to:
|
||||||
|
|
||||||
|
- decompress zip files directly to disk or in memory buffers
|
||||||
|
- compress files and store them to disk in .zip format or in compressed buffers
|
||||||
|
- update content of/add new/delete files from an existing .zip
|
||||||
|
|
||||||
|
# Dependencies
|
||||||
|
|
||||||
|
ADM-ZIP does not depend on any other Node.js libraries
|
||||||
|
|
||||||
|
# Examples
|
||||||
|
|
||||||
|
## Basic usage
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var AdmZip = require("adm-zip");
|
||||||
|
|
||||||
|
// reading archives
|
||||||
|
var zip = new AdmZip("./my_file.zip");
|
||||||
|
var zipEntries = zip.getEntries(); // an array of ZipEntry records
|
||||||
|
|
||||||
|
zipEntries.forEach(function (zipEntry) {
|
||||||
|
console.log(zipEntry.toString()); // outputs zip entries information
|
||||||
|
if (zipEntry.entryName == "my_file.txt") {
|
||||||
|
console.log(zipEntry.getData().toString("utf8"));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
// outputs the content of some_folder/my_file.txt
|
||||||
|
console.log(zip.readAsText("some_folder/my_file.txt"));
|
||||||
|
// extracts the specified file to the specified location
|
||||||
|
zip.extractEntryTo(/*entry name*/ "some_folder/my_file.txt", /*target path*/ "/home/me/tempfolder", /*maintainEntryPath*/ false, /*overwrite*/ true);
|
||||||
|
// extracts everything
|
||||||
|
zip.extractAllTo(/*target path*/ "/home/me/zipcontent/", /*overwrite*/ true);
|
||||||
|
|
||||||
|
// creating archives
|
||||||
|
var zip = new AdmZip();
|
||||||
|
|
||||||
|
// add file directly
|
||||||
|
var content = "inner content of the file";
|
||||||
|
zip.addFile("test.txt", Buffer.from(content, "utf8"), "entry comment goes here");
|
||||||
|
// add local file
|
||||||
|
zip.addLocalFile("/home/me/some_picture.png");
|
||||||
|
// get everything as a buffer
|
||||||
|
var willSendthis = zip.toBuffer();
|
||||||
|
// or write everything to disk
|
||||||
|
zip.writeZip(/*target file name*/ "/home/me/files.zip");
|
||||||
|
|
||||||
|
// ... more examples in the wiki
|
||||||
|
```
|
||||||
|
|
||||||
|
For more detailed information please check out the [wiki](https://github.com/cthackers/adm-zip/wiki).
|
||||||
|
|
||||||
|
[](https://travis-ci.org/cthackers/adm-zip)
|
||||||
783
node_modules/adm-zip/adm-zip.js
generated
vendored
Normal file
783
node_modules/adm-zip/adm-zip.js
generated
vendored
Normal file
@@ -0,0 +1,783 @@
|
|||||||
|
const Utils = require("./util");
|
||||||
|
const pth = require("path");
|
||||||
|
const ZipEntry = require("./zipEntry");
|
||||||
|
const ZipFile = require("./zipFile");
|
||||||
|
|
||||||
|
// Coerce optional arguments: return `val` only when it has the expected
// primitive type, otherwise fall back to the supplied default.
const get_Bool = (val, def) => ("boolean" === typeof val ? val : def);
const get_Str = (val, def) => ("string" === typeof val ? val : def);
|
||||||
|
|
||||||
|
// Options applied to every new AdmZip instance unless overridden by the
// caller (either via an options object passed as `input` or via `options`).
const defaultOptions = {
    // option "noSort" : if true it disables files sorting
    noSort: false,
    // read entries during load (initial loading may be slower)
    readEntries: false,
    // default method is none (Utils.Constants.NONE); switched to BUFFER/FILE
    // by the constructor once the input kind is known
    method: Utils.Constants.NONE,
    // file system implementation to use; null means the default chosen by Utils
    fs: null
};
|
||||||
|
|
||||||
|
/**
 * Creates a new AdmZip instance.
 *
 * @param input optional - path of an existing zip file, a Buffer with zip
 *              data, or an options object (the kind is detected by type)
 * @param options optional - options object merged over the defaults
 * @throws Error(INVALID_FILENAME) when `input` is a path that does not exist
 */
module.exports = function (/**String*/ input, /** object */ options) {
    let inBuffer = null;

    // create object based default options, allowing them to be overwritten
    const opts = Object.assign(Object.create(null), defaultOptions);

    // test input variable
    if (input && "object" === typeof input) {
        // if value is not buffer we accept it to be object with options
        if (!(input instanceof Uint8Array)) {
            Object.assign(opts, input);
            // the archive source may be supplied via opts.input instead
            input = opts.input ? opts.input : undefined;
            if (opts.input) delete opts.input;
        }

        // if input is buffer
        if (Buffer.isBuffer(input)) {
            inBuffer = input;
            opts.method = Utils.Constants.BUFFER;
            input = undefined;
        }
    }

    // assign options (explicit `options` wins over anything set so far)
    Object.assign(opts, options);

    // instantiate utils filesystem
    const filetools = new Utils(opts);

    // if input is file name we retrieve its content
    if (input && "string" === typeof input) {
        // load zip file
        if (filetools.fs.existsSync(input)) {
            opts.method = Utils.Constants.FILE;
            opts.filename = input;
            inBuffer = filetools.fs.readFileSync(input);
        } else {
            throw new Error(Utils.Errors.INVALID_FILENAME);
        }
    }

    // create variable — the in-memory representation of the archive
    const _zip = new ZipFile(inBuffer, opts);

    // path helpers used to guard against zip-slip / path traversal below
    const { canonical, sanitize } = Utils;
|
||||||
|
|
||||||
|
// Resolve an entry given either its name (string) or a ZipEntry-like object
// (anything exposing `entryName` and `header`); returns the matching
// ZipEntry from the archive, or null when not found.
function getEntry(/**Object*/ entry) {
    if (!entry || !_zip) {
        return null;
    }
    let item;
    if (typeof entry === "string") {
        item = _zip.getEntry(entry);
    } else if (typeof entry === "object" && typeof entry.entryName !== "undefined" && typeof entry.header !== "undefined") {
        item = _zip.getEntry(entry.entryName);
    }
    return item ? item : null;
}
|
||||||
|
|
||||||
|
// Normalize a zip-internal path: convert Windows separators to "/",
// collapse relative segments, and keep a trailing "/" so entry names can be
// appended directly.
function fixPath(zipPath) {
    const posix = pth.posix;
    const unified = zipPath.split("\\").join(posix.sep);
    return posix.join(".", posix.normalize(posix.sep + unified + posix.sep));
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
/**
|
||||||
|
* Extracts the given entry from the archive and returns the content as a Buffer object
|
||||||
|
* @param entry ZipEntry object or String with the full path of the entry
|
||||||
|
*
|
||||||
|
* @return Buffer or Null in case of error
|
||||||
|
*/
|
||||||
|
readFile: function (/**Object*/ entry, /*String, Buffer*/ pass) {
|
||||||
|
var item = getEntry(entry);
|
||||||
|
return (item && item.getData(pass)) || null;
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Asynchronous readFile
|
||||||
|
* @param entry ZipEntry object or String with the full path of the entry
|
||||||
|
* @param callback
|
||||||
|
*
|
||||||
|
* @return Buffer or Null in case of error
|
||||||
|
*/
|
||||||
|
readFileAsync: function (/**Object*/ entry, /**Function*/ callback) {
|
||||||
|
var item = getEntry(entry);
|
||||||
|
if (item) {
|
||||||
|
item.getDataAsync(callback);
|
||||||
|
} else {
|
||||||
|
callback(null, "getEntry failed for:" + entry);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extracts the given entry from the archive and returns the content as plain text in the given encoding
|
||||||
|
* @param entry ZipEntry object or String with the full path of the entry
|
||||||
|
* @param encoding Optional. If no encoding is specified utf8 is used
|
||||||
|
*
|
||||||
|
* @return String
|
||||||
|
*/
|
||||||
|
readAsText: function (/**Object*/ entry, /**String=*/ encoding) {
|
||||||
|
var item = getEntry(entry);
|
||||||
|
if (item) {
|
||||||
|
var data = item.getData();
|
||||||
|
if (data && data.length) {
|
||||||
|
return data.toString(encoding || "utf8");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return "";
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Asynchronous readAsText
|
||||||
|
* @param entry ZipEntry object or String with the full path of the entry
|
||||||
|
* @param callback
|
||||||
|
* @param encoding Optional. If no encoding is specified utf8 is used
|
||||||
|
*
|
||||||
|
* @return String
|
||||||
|
*/
|
||||||
|
readAsTextAsync: function (/**Object*/ entry, /**Function*/ callback, /**String=*/ encoding) {
|
||||||
|
var item = getEntry(entry);
|
||||||
|
if (item) {
|
||||||
|
item.getDataAsync(function (data, err) {
|
||||||
|
if (err) {
|
||||||
|
callback(data, err);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (data && data.length) {
|
||||||
|
callback(data.toString(encoding || "utf8"));
|
||||||
|
} else {
|
||||||
|
callback("");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
callback("");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove the entry from the file or the entry and all it's nested directories and files if the given entry is a directory
|
||||||
|
*
|
||||||
|
* @param entry
|
||||||
|
*/
|
||||||
|
deleteFile: function (/**Object*/ entry) {
|
||||||
|
// @TODO: test deleteFile
|
||||||
|
var item = getEntry(entry);
|
||||||
|
if (item) {
|
||||||
|
_zip.deleteEntry(item.entryName);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Adds a comment to the zip. The zip must be rewritten after adding the comment.
|
||||||
|
*
|
||||||
|
* @param comment
|
||||||
|
*/
|
||||||
|
addZipComment: function (/**String*/ comment) {
|
||||||
|
// @TODO: test addZipComment
|
||||||
|
_zip.comment = comment;
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the zip comment
|
||||||
|
*
|
||||||
|
* @return String
|
||||||
|
*/
|
||||||
|
getZipComment: function () {
|
||||||
|
return _zip.comment || "";
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Adds a comment to a specified zipEntry. The zip must be rewritten after adding the comment
|
||||||
|
* The comment cannot exceed 65535 characters in length
|
||||||
|
*
|
||||||
|
* @param entry
|
||||||
|
* @param comment
|
||||||
|
*/
|
||||||
|
addZipEntryComment: function (/**Object*/ entry, /**String*/ comment) {
|
||||||
|
var item = getEntry(entry);
|
||||||
|
if (item) {
|
||||||
|
item.comment = comment;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the comment of the specified entry
|
||||||
|
*
|
||||||
|
* @param entry
|
||||||
|
* @return String
|
||||||
|
*/
|
||||||
|
getZipEntryComment: function (/**Object*/ entry) {
|
||||||
|
var item = getEntry(entry);
|
||||||
|
if (item) {
|
||||||
|
return item.comment || "";
|
||||||
|
}
|
||||||
|
return "";
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Updates the content of an existing entry inside the archive. The zip must be rewritten after updating the content
|
||||||
|
*
|
||||||
|
* @param entry
|
||||||
|
* @param content
|
||||||
|
*/
|
||||||
|
updateFile: function (/**Object*/ entry, /**Buffer*/ content) {
|
||||||
|
var item = getEntry(entry);
|
||||||
|
if (item) {
|
||||||
|
item.setData(content);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Adds a file from the disk to the archive
|
||||||
|
*
|
||||||
|
* @param localPath File to add to zip
|
||||||
|
* @param zipPath Optional path inside the zip
|
||||||
|
* @param zipName Optional name for the file
|
||||||
|
*/
|
||||||
|
addLocalFile: function (/**String*/ localPath, /**String=*/ zipPath, /**String=*/ zipName, /**String*/ comment) {
|
||||||
|
if (filetools.fs.existsSync(localPath)) {
|
||||||
|
// fix ZipPath
|
||||||
|
zipPath = zipPath ? fixPath(zipPath) : "";
|
||||||
|
|
||||||
|
// p - local file name
|
||||||
|
var p = localPath.split("\\").join("/").split("/").pop();
|
||||||
|
|
||||||
|
// add file name into zippath
|
||||||
|
zipPath += zipName ? zipName : p;
|
||||||
|
|
||||||
|
// read file attributes
|
||||||
|
const _attr = filetools.fs.statSync(localPath);
|
||||||
|
|
||||||
|
// add file into zip file
|
||||||
|
this.addFile(zipPath, filetools.fs.readFileSync(localPath), comment, _attr);
|
||||||
|
} else {
|
||||||
|
throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath));
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Adds a local directory and all its nested files and directories to the archive
|
||||||
|
*
|
||||||
|
* @param localPath
|
||||||
|
* @param zipPath optional path inside zip
|
||||||
|
* @param filter optional RegExp or Function if files match will
|
||||||
|
* be included.
|
||||||
|
*/
|
||||||
|
addLocalFolder: function (/**String*/ localPath, /**String=*/ zipPath, /**=RegExp|Function*/ filter) {
|
||||||
|
// Prepare filter
|
||||||
|
if (filter instanceof RegExp) {
|
||||||
|
// if filter is RegExp wrap it
|
||||||
|
filter = (function (rx) {
|
||||||
|
return function (filename) {
|
||||||
|
return rx.test(filename);
|
||||||
|
};
|
||||||
|
})(filter);
|
||||||
|
} else if ("function" !== typeof filter) {
|
||||||
|
// if filter is not function we will replace it
|
||||||
|
filter = function () {
|
||||||
|
return true;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// fix ZipPath
|
||||||
|
zipPath = zipPath ? fixPath(zipPath) : "";
|
||||||
|
|
||||||
|
// normalize the path first
|
||||||
|
localPath = pth.normalize(localPath);
|
||||||
|
|
||||||
|
if (filetools.fs.existsSync(localPath)) {
|
||||||
|
const items = filetools.findFiles(localPath);
|
||||||
|
const self = this;
|
||||||
|
|
||||||
|
if (items.length) {
|
||||||
|
items.forEach(function (filepath) {
|
||||||
|
var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix
|
||||||
|
if (filter(p)) {
|
||||||
|
var stats = filetools.fs.statSync(filepath);
|
||||||
|
if (stats.isFile()) {
|
||||||
|
self.addFile(zipPath + p, filetools.fs.readFileSync(filepath), "", stats);
|
||||||
|
} else {
|
||||||
|
self.addFile(zipPath + p + "/", Buffer.alloc(0), "", stats);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath));
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Asynchronous addLocalFile
|
||||||
|
* @param localPath
|
||||||
|
* @param callback
|
||||||
|
* @param zipPath optional path inside zip
|
||||||
|
* @param filter optional RegExp or Function if files match will
|
||||||
|
* be included.
|
||||||
|
*/
|
||||||
|
addLocalFolderAsync: function (/*String*/ localPath, /*Function*/ callback, /*String*/ zipPath, /*RegExp|Function*/ filter) {
|
||||||
|
if (filter instanceof RegExp) {
|
||||||
|
filter = (function (rx) {
|
||||||
|
return function (filename) {
|
||||||
|
return rx.test(filename);
|
||||||
|
};
|
||||||
|
})(filter);
|
||||||
|
} else if ("function" !== typeof filter) {
|
||||||
|
filter = function () {
|
||||||
|
return true;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// fix ZipPath
|
||||||
|
zipPath = zipPath ? fixPath(zipPath) : "";
|
||||||
|
|
||||||
|
// normalize the path first
|
||||||
|
localPath = pth.normalize(localPath);
|
||||||
|
|
||||||
|
var self = this;
|
||||||
|
filetools.fs.open(localPath, "r", function (err) {
|
||||||
|
if (err && err.code === "ENOENT") {
|
||||||
|
callback(undefined, Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath));
|
||||||
|
} else if (err) {
|
||||||
|
callback(undefined, err);
|
||||||
|
} else {
|
||||||
|
var items = filetools.findFiles(localPath);
|
||||||
|
var i = -1;
|
||||||
|
|
||||||
|
var next = function () {
|
||||||
|
i += 1;
|
||||||
|
if (i < items.length) {
|
||||||
|
var filepath = items[i];
|
||||||
|
var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix
|
||||||
|
p = p
|
||||||
|
.normalize("NFD")
|
||||||
|
.replace(/[\u0300-\u036f]/g, "")
|
||||||
|
.replace(/[^\x20-\x7E]/g, ""); // accent fix
|
||||||
|
if (filter(p)) {
|
||||||
|
filetools.fs.stat(filepath, function (er0, stats) {
|
||||||
|
if (er0) callback(undefined, er0);
|
||||||
|
if (stats.isFile()) {
|
||||||
|
filetools.fs.readFile(filepath, function (er1, data) {
|
||||||
|
if (er1) {
|
||||||
|
callback(undefined, er1);
|
||||||
|
} else {
|
||||||
|
self.addFile(zipPath + p, data, "", stats);
|
||||||
|
next();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
self.addFile(zipPath + p + "/", Buffer.alloc(0), "", stats);
|
||||||
|
next();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
next();
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
callback(true, undefined);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
next();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
* @param {string} localPath - path where files will be extracted
|
||||||
|
* @param {object} props - optional properties
|
||||||
|
* @param {string} props.zipPath - optional path inside zip
|
||||||
|
* @param {regexp, function} props.filter - RegExp or Function if files match will be included.
|
||||||
|
*/
|
||||||
|
addLocalFolderPromise: function (/*String*/ localPath, /* object */ props) {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const { filter, zipPath } = Object.assign({}, props);
|
||||||
|
this.addLocalFolderAsync(
|
||||||
|
localPath,
|
||||||
|
(done, err) => {
|
||||||
|
if (err) reject(err);
|
||||||
|
if (done) resolve(this);
|
||||||
|
},
|
||||||
|
zipPath,
|
||||||
|
filter
|
||||||
|
);
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
 * Allows you to create a entry (file or directory) in the zip file.
 * If you want to create a directory the entryName must end in / and a null buffer should be provided.
 * Comment and attributes are optional.
 * If an entry with the same name already exists, its data is replaced in place.
 *
 * @param {string} entryName
 * @param {Buffer | string} content - file content as buffer or utf8 coded string
 * @param {string} comment - file comment
 * @param {number | object} attr - number as unix file permissions, object as filesystem Stats object
 */
addFile: function (/**String*/ entryName, /**Buffer*/ content, /**String*/ comment, /**Number*/ attr) {
    let entry = getEntry(entryName);
    const update = entry != null;

    // prepare new entry
    if (!update) {
        entry = new ZipEntry();
        entry.entryName = entryName;
    }
    entry.comment = comment || "";

    const isStat = "object" === typeof attr && attr instanceof filetools.fs.Stats;

    // last modification time from file stats
    if (isStat) {
        entry.header.time = attr.mtime;
    }

    // Set file attribute
    var fileattr = entry.isDirectory ? 0x10 : 0; // (MS-DOS directory flag)

    // extended attributes field for Unix
    if (!Utils.isWin) {
        // set file type either S_IFDIR / S_IFREG
        let unix = entry.isDirectory ? 0x4000 : 0x8000;

        if (isStat) {
            // File attributes from file stats (permission bits only)
            unix |= 0xfff & attr.mode;
        } else if ("number" === typeof attr) {
            // attr from given attr values (permission bits only)
            unix |= 0xfff & attr;
        } else {
            // Default values:
            unix |= entry.isDirectory ? 0o755 : 0o644; // permissions (drwxr-xr-x) or (-rw-r--r--)
        }

        // Unix mode lives in the high 16 bits of the external attribute field;
        // >>> 0 forces an unsigned 32-bit result
        fileattr = (fileattr | (unix << 16)) >>> 0; // add attributes
    }

    entry.attr = fileattr;

    entry.setData(content);
    // only register the entry when it is new; updates mutate in place
    if (!update) _zip.setEntry(entry);
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns an array of ZipEntry objects representing the files and folders inside the archive
|
||||||
|
*
|
||||||
|
* @return Array
|
||||||
|
*/
|
||||||
|
getEntries: function () {
|
||||||
|
return _zip ? _zip.entries : [];
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a ZipEntry object representing the file or folder specified by ``name``.
|
||||||
|
*
|
||||||
|
* @param name
|
||||||
|
* @return ZipEntry
|
||||||
|
*/
|
||||||
|
getEntry: function (/**String*/ name) {
|
||||||
|
return getEntry(name);
|
||||||
|
},
|
||||||
|
|
||||||
|
getEntryCount: function () {
|
||||||
|
return _zip.getEntryCount();
|
||||||
|
},
|
||||||
|
|
||||||
|
forEach: function (callback) {
|
||||||
|
return _zip.forEach(callback);
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extracts the given entry to the given targetPath
|
||||||
|
* If the entry is a directory inside the archive, the entire directory and it's subdirectories will be extracted
|
||||||
|
*
|
||||||
|
* @param entry ZipEntry object or String with the full path of the entry
|
||||||
|
* @param targetPath Target folder where to write the file
|
||||||
|
* @param maintainEntryPath If maintainEntryPath is true and the entry is inside a folder, the entry folder
|
||||||
|
* will be created in targetPath as well. Default is TRUE
|
||||||
|
* @param overwrite If the file already exists at the target path, the file will be overwriten if this is true.
|
||||||
|
* Default is FALSE
|
||||||
|
* @param keepOriginalPermission The file will be set as the permission from the entry if this is true.
|
||||||
|
* Default is FALSE
|
||||||
|
* @param outFileName String If set will override the filename of the extracted file (Only works if the entry is a file)
|
||||||
|
*
|
||||||
|
* @return Boolean
|
||||||
|
*/
|
||||||
|
extractEntryTo: function (
|
||||||
|
/**Object*/ entry,
|
||||||
|
/**String*/ targetPath,
|
||||||
|
/**Boolean*/ maintainEntryPath,
|
||||||
|
/**Boolean*/ overwrite,
|
||||||
|
/**Boolean*/ keepOriginalPermission,
|
||||||
|
/**String**/ outFileName
|
||||||
|
) {
|
||||||
|
overwrite = get_Bool(overwrite, false);
|
||||||
|
keepOriginalPermission = get_Bool(keepOriginalPermission, false);
|
||||||
|
maintainEntryPath = get_Bool(maintainEntryPath, true);
|
||||||
|
outFileName = get_Str(outFileName, get_Str(keepOriginalPermission, undefined));
|
||||||
|
|
||||||
|
var item = getEntry(entry);
|
||||||
|
if (!item) {
|
||||||
|
throw new Error(Utils.Errors.NO_ENTRY);
|
||||||
|
}
|
||||||
|
|
||||||
|
var entryName = canonical(item.entryName);
|
||||||
|
|
||||||
|
var target = sanitize(targetPath, outFileName && !item.isDirectory ? outFileName : maintainEntryPath ? entryName : pth.basename(entryName));
|
||||||
|
|
||||||
|
if (item.isDirectory) {
|
||||||
|
var children = _zip.getEntryChildren(item);
|
||||||
|
children.forEach(function (child) {
|
||||||
|
if (child.isDirectory) return;
|
||||||
|
var content = child.getData();
|
||||||
|
if (!content) {
|
||||||
|
throw new Error(Utils.Errors.CANT_EXTRACT_FILE);
|
||||||
|
}
|
||||||
|
var name = canonical(child.entryName);
|
||||||
|
var childName = sanitize(targetPath, maintainEntryPath ? name : pth.basename(name));
|
||||||
|
// The reverse operation for attr depend on method addFile()
|
||||||
|
const fileAttr = keepOriginalPermission ? child.header.fileAttr : undefined;
|
||||||
|
filetools.writeFileTo(childName, content, overwrite, fileAttr);
|
||||||
|
});
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
var content = item.getData();
|
||||||
|
if (!content) throw new Error(Utils.Errors.CANT_EXTRACT_FILE);
|
||||||
|
|
||||||
|
if (filetools.fs.existsSync(target) && !overwrite) {
|
||||||
|
throw new Error(Utils.Errors.CANT_OVERRIDE);
|
||||||
|
}
|
||||||
|
// The reverse operation for attr depend on method addFile()
|
||||||
|
const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined;
|
||||||
|
filetools.writeFileTo(target, content, overwrite, fileAttr);
|
||||||
|
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test the archive
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
test: function (pass) {
|
||||||
|
if (!_zip) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
for (var entry in _zip.entries) {
|
||||||
|
try {
|
||||||
|
if (entry.isDirectory) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
var content = _zip.entries[entry].getData(pass);
|
||||||
|
if (!content) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
 * Extracts the entire archive to the given location
 *
 * @param targetPath Target location
 * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true.
 *                  Default is FALSE
 * @param keepOriginalPermission The file will be set as the permission from the entry if this is true.
 *                               Default is FALSE
 * @param pass optional password (String or Buffer) for encrypted entries
 * @throws Error(NO_ZIP) when no archive is loaded; Error(CANT_EXTRACT_FILE)
 *         when an entry cannot be decompressed or its timestamps cannot be set
 */
extractAllTo: function (/**String*/ targetPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /*String, Buffer*/ pass) {
    overwrite = get_Bool(overwrite, false);
    // legacy-signature support: extractAllTo(target, overwrite, pass) — when a
    // string is passed in the keepOriginalPermission slot it is treated as the
    // password; get_Bool below then coerces keepOriginalPermission to false
    pass = get_Str(keepOriginalPermission, pass);
    keepOriginalPermission = get_Bool(keepOriginalPermission, false);
    if (!_zip) {
        throw new Error(Utils.Errors.NO_ZIP);
    }
    _zip.entries.forEach(function (entry) {
        // sanitize() confines the output path to targetPath (zip-slip guard)
        var entryName = sanitize(targetPath, canonical(entry.entryName.toString()));
        if (entry.isDirectory) {
            filetools.makeDir(entryName);
            return;
        }
        var content = entry.getData(pass);
        if (!content) {
            throw new Error(Utils.Errors.CANT_EXTRACT_FILE);
        }
        // The reverse operation for attr depend on method addFile()
        const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined;
        filetools.writeFileTo(entryName, content, overwrite, fileAttr);
        try {
            // restore the entry's original modification time on the written file
            filetools.fs.utimesSync(entryName, entry.header.time, entry.header.time);
        } catch (err) {
            throw new Error(Utils.Errors.CANT_EXTRACT_FILE);
        }
    });
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Asynchronous extractAllTo
|
||||||
|
*
|
||||||
|
* @param targetPath Target location
|
||||||
|
* @param overwrite If the file already exists at the target path, the file will be overwriten if this is true.
|
||||||
|
* Default is FALSE
|
||||||
|
* @param keepOriginalPermission The file will be set as the permission from the entry if this is true.
|
||||||
|
* Default is FALSE
|
||||||
|
* @param callback The callback will be executed when all entries are extracted successfully or any error is thrown.
|
||||||
|
*/
|
||||||
|
extractAllToAsync: function (/**String*/ targetPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /**Function*/ callback) {
|
||||||
|
if (!callback) {
|
||||||
|
callback = function () {};
|
||||||
|
}
|
||||||
|
overwrite = get_Bool(overwrite, false);
|
||||||
|
if (typeof keepOriginalPermission === "function" && !callback) callback = keepOriginalPermission;
|
||||||
|
keepOriginalPermission = get_Bool(keepOriginalPermission, false);
|
||||||
|
if (!_zip) {
|
||||||
|
callback(new Error(Utils.Errors.NO_ZIP));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
targetPath = pth.resolve(targetPath);
|
||||||
|
// convert entryName to
|
||||||
|
const getPath = (entry) => sanitize(targetPath, pth.normalize(canonical(entry.entryName.toString())));
|
||||||
|
const getError = (msg, file) => new Error(msg + ': "' + file + '"');
|
||||||
|
|
||||||
|
// separate directories from files
|
||||||
|
const dirEntries = [];
|
||||||
|
const fileEntries = new Set();
|
||||||
|
_zip.entries.forEach((e) => {
|
||||||
|
if (e.isDirectory) {
|
||||||
|
dirEntries.push(e);
|
||||||
|
} else {
|
||||||
|
fileEntries.add(e);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Create directory entries first synchronously
|
||||||
|
// this prevents race condition and assures folders are there before writing files
|
||||||
|
for (const entry of dirEntries) {
|
||||||
|
const dirPath = getPath(entry);
|
||||||
|
// The reverse operation for attr depend on method addFile()
|
||||||
|
const dirAttr = keepOriginalPermission ? entry.header.fileAttr : undefined;
|
||||||
|
try {
|
||||||
|
filetools.makeDir(dirPath);
|
||||||
|
if (dirAttr) filetools.fs.chmodSync(dirPath, dirAttr);
|
||||||
|
// in unix timestamp will change if files are later added to folder, but still
|
||||||
|
filetools.fs.utimesSync(dirPath, entry.header.time, entry.header.time);
|
||||||
|
} catch (er) {
|
||||||
|
callback(getError("Unable to create folder", dirPath));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// callback wrapper, for some house keeping
|
||||||
|
const done = () => {
|
||||||
|
if (fileEntries.size === 0) {
|
||||||
|
callback();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Extract file entries asynchronously
|
||||||
|
for (const entry of fileEntries.values()) {
|
||||||
|
const entryName = pth.normalize(canonical(entry.entryName.toString()));
|
||||||
|
const filePath = sanitize(targetPath, entryName);
|
||||||
|
entry.getDataAsync(function (content, err_1) {
|
||||||
|
if (err_1) {
|
||||||
|
callback(new Error(err_1));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (!content) {
|
||||||
|
callback(new Error(Utils.Errors.CANT_EXTRACT_FILE));
|
||||||
|
} else {
|
||||||
|
// The reverse operation for attr depend on method addFile()
|
||||||
|
const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined;
|
||||||
|
filetools.writeFileToAsync(filePath, content, overwrite, fileAttr, function (succ) {
|
||||||
|
if (!succ) {
|
||||||
|
callback(getError("Unable to write file", filePath));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
filetools.fs.utimes(filePath, entry.header.time, entry.header.time, function (err_2) {
|
||||||
|
if (err_2) {
|
||||||
|
callback(getError("Unable to set times", filePath));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
fileEntries.delete(entry);
|
||||||
|
// call the callback if it was last entry
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
// call the callback if fileEntries was empty
|
||||||
|
done();
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Writes the newly created zip file to disk at the specified location or if a zip was opened and no ``targetFileName`` is provided, it will overwrite the opened zip
|
||||||
|
*
|
||||||
|
* @param targetFileName
|
||||||
|
* @param callback
|
||||||
|
*/
|
||||||
|
writeZip: function (/**String*/ targetFileName, /**Function*/ callback) {
|
||||||
|
if (arguments.length === 1) {
|
||||||
|
if (typeof targetFileName === "function") {
|
||||||
|
callback = targetFileName;
|
||||||
|
targetFileName = "";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!targetFileName && opts.filename) {
|
||||||
|
targetFileName = opts.filename;
|
||||||
|
}
|
||||||
|
if (!targetFileName) return;
|
||||||
|
|
||||||
|
var zipData = _zip.compressToBuffer();
|
||||||
|
if (zipData) {
|
||||||
|
var ok = filetools.writeFileTo(targetFileName, zipData, true);
|
||||||
|
if (typeof callback === "function") callback(!ok ? new Error("failed") : null, "");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
writeZipPromise: function (/**String*/ targetFileName, /* object */ props) {
|
||||||
|
const { overwrite, perm } = Object.assign({ overwrite: true }, props);
|
||||||
|
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
// find file name
|
||||||
|
if (!targetFileName && opts.filename) targetFileName = opts.filename;
|
||||||
|
if (!targetFileName) reject("ADM-ZIP: ZIP File Name Missing");
|
||||||
|
|
||||||
|
this.toBufferPromise().then((zipData) => {
|
||||||
|
const ret = (done) => (done ? resolve(done) : reject("ADM-ZIP: Wasn't able to write zip file"));
|
||||||
|
filetools.writeFileToAsync(targetFileName, zipData, overwrite, perm, ret);
|
||||||
|
}, reject);
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
toBufferPromise: function () {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
_zip.toAsyncBuffer(resolve, reject);
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the content of the entire zip file as a Buffer object
|
||||||
|
*
|
||||||
|
* @return Buffer
|
||||||
|
*/
|
||||||
|
toBuffer: function (/**Function=*/ onSuccess, /**Function=*/ onFail, /**Function=*/ onItemStart, /**Function=*/ onItemEnd) {
|
||||||
|
this.valueOf = 2;
|
||||||
|
if (typeof onSuccess === "function") {
|
||||||
|
_zip.toAsyncBuffer(onSuccess, onFail, onItemStart, onItemEnd);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return _zip.compressToBuffer();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
};
|
||||||
338
node_modules/adm-zip/headers/entryHeader.js
generated
vendored
Normal file
338
node_modules/adm-zip/headers/entryHeader.js
generated
vendored
Normal file
@@ -0,0 +1,338 @@
|
|||||||
|
var Utils = require("../util"),
    Constants = Utils.Constants;

/* The central directory file header */
// Factory for a ZIP central-directory (CEN) entry header object.
// Holds the per-entry metadata fields and can (de)serialize both the
// CEN record and the matching local file (LOC) header.
module.exports = function () {
    var _verMade = 20, // v2.0
        _version = 10, // v1.0
        _flags = 0,
        _method = 0,
        _time = 0, // DOS date/time, packed into 32 bits (see setTime)
        _crc = 0,
        _compressedSize = 0,
        _size = 0,
        _fnameLen = 0,
        _extraLen = 0,
        _comLen = 0,
        _diskStart = 0,
        _inattr = 0,
        _attr = 0,
        _offset = 0;

    // High byte of "version made by" encodes the host OS (0x0a = NTFS/Windows, 0x03 = Unix).
    _verMade |= Utils.isWin ? 0x0a00 : 0x0300;

    // Set EFS flag since filename and comment fields are all by default encoded using UTF-8.
    // Without it file names may be corrupted for other apps when file names use unicode chars
    _flags |= Constants.FLG_EFS;

    // Cached copy of the parsed LOC header (populated by loadDataHeaderFromBinary).
    var _dataHeader = {};

    // Pack a JS Date (or anything Date() accepts) into the 32-bit DOS date/time format.
    function setTime(val) {
        val = new Date(val);
        _time =
            (((val.getFullYear() - 1980) & 0x7f) << 25) | // b25-31 years from 1980
            ((val.getMonth() + 1) << 21) | // b21-24 month
            (val.getDate() << 16) | // b16-20 day of month
            // 2 bytes time
            (val.getHours() << 11) | // b11-15 hour
            (val.getMinutes() << 5) | // b05-10 minute
            (val.getSeconds() >> 1); // b00-04 seconds divided by 2

    }

    // New entries default to "now".
    setTime(+new Date());

    return {
        get made() {
            return _verMade;
        },
        set made(val) {
            _verMade = val;
        },

        get version() {
            return _version;
        },
        set version(val) {
            _version = val;
        },

        get flags() {
            return _flags;
        },
        set flags(val) {
            _flags = val;
        },

        get method() {
            return _method;
        },
        set method(val) {
            // NOTE(review): the STORED case has no `break`, so it falls through
            // and `this.version` always ends up 20 — confirm whether this
            // fall-through is intentional upstream before "fixing" it.
            switch (val) {
                case Constants.STORED:
                    this.version = 10;
                case Constants.DEFLATED:
                default:
                    this.version = 20;
            }
            _method = val;
        },

        // Unpacks the DOS date/time field back into a local-time JS Date.
        get time() {
            return new Date(((_time >> 25) & 0x7f) + 1980, ((_time >> 21) & 0x0f) - 1, (_time >> 16) & 0x1f, (_time >> 11) & 0x1f, (_time >> 5) & 0x3f, (_time & 0x1f) << 1);
        },
        set time(val) {
            setTime(val);
        },

        get crc() {
            return _crc;
        },
        set crc(val) {
            // clamp to an unsigned 32-bit value
            _crc = Math.max(0, val) >>> 0;
        },

        get compressedSize() {
            return _compressedSize;
        },
        set compressedSize(val) {
            _compressedSize = Math.max(0, val) >>> 0;
        },

        get size() {
            return _size;
        },
        set size(val) {
            _size = Math.max(0, val) >>> 0;
        },

        get fileNameLength() {
            return _fnameLen;
        },
        set fileNameLength(val) {
            _fnameLen = val;
        },

        get extraLength() {
            return _extraLen;
        },
        set extraLength(val) {
            _extraLen = val;
        },

        get commentLength() {
            return _comLen;
        },
        set commentLength(val) {
            _comLen = val;
        },

        get diskNumStart() {
            return _diskStart;
        },
        set diskNumStart(val) {
            _diskStart = Math.max(0, val) >>> 0;
        },

        get inAttr() {
            return _inattr;
        },
        set inAttr(val) {
            _inattr = Math.max(0, val) >>> 0;
        },

        get attr() {
            return _attr;
        },
        set attr(val) {
            _attr = Math.max(0, val) >>> 0;
        },

        // get Unix file permissions
        // (upper 16 bits of the external attributes, masked to the mode bits)
        get fileAttr() {
            return _attr ? (((_attr >>> 0) | 0) >> 16) & 0xfff : 0;
        },

        get offset() {
            return _offset;
        },
        set offset(val) {
            _offset = Math.max(0, val) >>> 0;
        },

        // Bit 0 of the general-purpose flags marks an encrypted entry.
        get encripted() {
            return (_flags & 1) === 1;
        },

        // Total on-disk size of this CEN record including variable-length tails.
        get entryHeaderSize() {
            return Constants.CENHDR + _fnameLen + _extraLen + _comLen;
        },

        // Absolute file offset of the entry's compressed data, i.e. just past
        // the LOC header and its variable-length name/extra fields.
        // Requires loadDataHeaderFromBinary to have run first.
        get realDataOffset() {
            return _offset + Constants.LOCHDR + _dataHeader.fnameLen + _dataHeader.extraLen;
        },

        get dataHeader() {
            return _dataHeader;
        },

        // Parse the local file (LOC) header located at _offset inside `input`.
        // Throws Utils.Errors.INVALID_LOC if the signature does not match.
        loadDataHeaderFromBinary: function (/*Buffer*/ input) {
            var data = input.slice(_offset, _offset + Constants.LOCHDR);
            // 30 bytes and should start with "PK\003\004"
            if (data.readUInt32LE(0) !== Constants.LOCSIG) {
                throw new Error(Utils.Errors.INVALID_LOC);
            }
            _dataHeader = {
                // version needed to extract
                version: data.readUInt16LE(Constants.LOCVER),
                // general purpose bit flag
                flags: data.readUInt16LE(Constants.LOCFLG),
                // compression method
                method: data.readUInt16LE(Constants.LOCHOW),
                // modification time (2 bytes time, 2 bytes date)
                time: data.readUInt32LE(Constants.LOCTIM),
                // uncompressed file crc-32 value
                crc: data.readUInt32LE(Constants.LOCCRC),
                // compressed size
                compressedSize: data.readUInt32LE(Constants.LOCSIZ),
                // uncompressed size
                size: data.readUInt32LE(Constants.LOCLEN),
                // filename length
                fnameLen: data.readUInt16LE(Constants.LOCNAM),
                // extra field length
                extraLen: data.readUInt16LE(Constants.LOCEXT)
            };
        },

        // Parse a 46-byte central-directory (CEN) record into this header's fields.
        // Throws Utils.Errors.INVALID_CEN on size/signature mismatch.
        loadFromBinary: function (/*Buffer*/ data) {
            // data should be 46 bytes and start with "PK 01 02"
            if (data.length !== Constants.CENHDR || data.readUInt32LE(0) !== Constants.CENSIG) {
                throw new Error(Utils.Errors.INVALID_CEN);
            }
            // version made by
            _verMade = data.readUInt16LE(Constants.CENVEM);
            // version needed to extract
            _version = data.readUInt16LE(Constants.CENVER);
            // encrypt, decrypt flags
            _flags = data.readUInt16LE(Constants.CENFLG);
            // compression method
            _method = data.readUInt16LE(Constants.CENHOW);
            // modification time (2 bytes time, 2 bytes date)
            _time = data.readUInt32LE(Constants.CENTIM);
            // uncompressed file crc-32 value
            _crc = data.readUInt32LE(Constants.CENCRC);
            // compressed size
            _compressedSize = data.readUInt32LE(Constants.CENSIZ);
            // uncompressed size
            _size = data.readUInt32LE(Constants.CENLEN);
            // filename length
            _fnameLen = data.readUInt16LE(Constants.CENNAM);
            // extra field length
            _extraLen = data.readUInt16LE(Constants.CENEXT);
            // file comment length
            _comLen = data.readUInt16LE(Constants.CENCOM);
            // volume number start
            _diskStart = data.readUInt16LE(Constants.CENDSK);
            // internal file attributes
            _inattr = data.readUInt16LE(Constants.CENATT);
            // external file attributes
            _attr = data.readUInt32LE(Constants.CENATX);
            // LOC header offset
            _offset = data.readUInt32LE(Constants.CENOFF);
        },

        // Serialize this header's fields as a 30-byte local file (LOC) header.
        dataHeaderToBinary: function () {
            // LOC header size (30 bytes)
            var data = Buffer.alloc(Constants.LOCHDR);
            // "PK\003\004"
            data.writeUInt32LE(Constants.LOCSIG, 0);
            // version needed to extract
            data.writeUInt16LE(_version, Constants.LOCVER);
            // general purpose bit flag
            data.writeUInt16LE(_flags, Constants.LOCFLG);
            // compression method
            data.writeUInt16LE(_method, Constants.LOCHOW);
            // modification time (2 bytes time, 2 bytes date)
            data.writeUInt32LE(_time, Constants.LOCTIM);
            // uncompressed file crc-32 value
            data.writeUInt32LE(_crc, Constants.LOCCRC);
            // compressed size
            data.writeUInt32LE(_compressedSize, Constants.LOCSIZ);
            // uncompressed size
            data.writeUInt32LE(_size, Constants.LOCLEN);
            // filename length
            data.writeUInt16LE(_fnameLen, Constants.LOCNAM);
            // extra field length
            data.writeUInt16LE(_extraLen, Constants.LOCEXT);
            return data;
        },

        // Serialize this header as a CEN record; the variable-length tail
        // (name/extra/comment) is allocated but zero-filled — callers write it.
        entryHeaderToBinary: function () {
            // CEN header size (46 bytes)
            var data = Buffer.alloc(Constants.CENHDR + _fnameLen + _extraLen + _comLen);
            // "PK\001\002"
            data.writeUInt32LE(Constants.CENSIG, 0);
            // version made by
            data.writeUInt16LE(_verMade, Constants.CENVEM);
            // version needed to extract
            data.writeUInt16LE(_version, Constants.CENVER);
            // encrypt, decrypt flags
            data.writeUInt16LE(_flags, Constants.CENFLG);
            // compression method
            data.writeUInt16LE(_method, Constants.CENHOW);
            // modification time (2 bytes time, 2 bytes date)
            data.writeUInt32LE(_time, Constants.CENTIM);
            // uncompressed file crc-32 value
            data.writeUInt32LE(_crc, Constants.CENCRC);
            // compressed size
            data.writeUInt32LE(_compressedSize, Constants.CENSIZ);
            // uncompressed size
            data.writeUInt32LE(_size, Constants.CENLEN);
            // filename length
            data.writeUInt16LE(_fnameLen, Constants.CENNAM);
            // extra field length
            data.writeUInt16LE(_extraLen, Constants.CENEXT);
            // file comment length
            data.writeUInt16LE(_comLen, Constants.CENCOM);
            // volume number start
            data.writeUInt16LE(_diskStart, Constants.CENDSK);
            // internal file attributes
            data.writeUInt16LE(_inattr, Constants.CENATT);
            // external file attributes
            data.writeUInt32LE(_attr, Constants.CENATX);
            // LOC header offset
            data.writeUInt32LE(_offset, Constants.CENOFF);
            // fill all with
            data.fill(0x00, Constants.CENHDR);
            return data;
        },

        // Human-readable snapshot of all fields (sizes rendered as "<n> bytes").
        toJSON: function () {
            const bytes = function (nr) {
                return nr + " bytes";
            };

            return {
                made: _verMade,
                version: _version,
                flags: _flags,
                method: Utils.methodToString(_method),
                time: this.time,
                crc: "0x" + _crc.toString(16).toUpperCase(),
                compressedSize: bytes(_compressedSize),
                size: bytes(_size),
                fileNameLength: bytes(_fnameLen),
                extraLength: bytes(_extraLen),
                commentLength: bytes(_comLen),
                diskNumStart: _diskStart,
                inAttr: _inattr,
                attr: _attr,
                offset: _offset,
                entryHeaderSize: bytes(Constants.CENHDR + _fnameLen + _extraLen + _comLen)
            };
        },

        toString: function () {
            return JSON.stringify(this.toJSON(), null, "\t");
        }
    };
};
|
||||||
2
node_modules/adm-zip/headers/index.js
generated
vendored
Normal file
2
node_modules/adm-zip/headers/index.js
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
// Aggregates the zip header constructors behind a single require point.
const entryHeader = require("./entryHeader");
const mainHeader = require("./mainHeader");

exports.EntryHeader = entryHeader;
exports.MainHeader = mainHeader;
|
||||||
129
node_modules/adm-zip/headers/mainHeader.js
generated
vendored
Normal file
129
node_modules/adm-zip/headers/mainHeader.js
generated
vendored
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
var Utils = require("../util"),
    Constants = Utils.Constants;

/* The entries in the end of central directory */
// Factory for the End Of Central Directory (EOCD) record object.
// Tracks entry counts, central-directory size/offset and comment length,
// and can parse both the classic 22-byte EOCD and the Zip64 variant.
module.exports = function () {
    var _volumeEntries = 0,
        _totalEntries = 0,
        _size = 0,
        _offset = 0,
        _commentLength = 0;

    return {
        get diskEntries() {
            return _volumeEntries;
        },
        set diskEntries(/*Number*/ val) {
            // single-volume archives: keep both counters in sync
            _volumeEntries = _totalEntries = val;
        },

        get totalEntries() {
            return _totalEntries;
        },
        set totalEntries(/*Number*/ val) {
            _totalEntries = _volumeEntries = val;
        },

        get size() {
            return _size;
        },
        set size(/*Number*/ val) {
            _size = val;
        },

        get offset() {
            return _offset;
        },
        set offset(/*Number*/ val) {
            _offset = val;
        },

        get commentLength() {
            return _commentLength;
        },
        set commentLength(/*Number*/ val) {
            _commentLength = val;
        },

        // On-disk size of the EOCD record including the trailing comment.
        get mainHeaderSize() {
            return Constants.ENDHDR + _commentLength;
        },

        // Parse an EOCD (or Zip64 EOCD) record from `data`.
        // Throws Utils.Errors.INVALID_END if neither signature/size matches.
        loadFromBinary: function (/*Buffer*/ data) {
            // data should be 22 bytes and start with "PK 05 06"
            // or be 56+ bytes and start with "PK 06 06" for Zip64
            if (
                (data.length !== Constants.ENDHDR || data.readUInt32LE(0) !== Constants.ENDSIG) &&
                (data.length < Constants.ZIP64HDR || data.readUInt32LE(0) !== Constants.ZIP64SIG)
            ) {
                throw new Error(Utils.Errors.INVALID_END);
            }

            if (data.readUInt32LE(0) === Constants.ENDSIG) {
                // classic EOCD: 16-bit counts, 32-bit size/offset
                // number of entries on this volume
                _volumeEntries = data.readUInt16LE(Constants.ENDSUB);
                // total number of entries
                _totalEntries = data.readUInt16LE(Constants.ENDTOT);
                // central directory size in bytes
                _size = data.readUInt32LE(Constants.ENDSIZ);
                // offset of first CEN header
                _offset = data.readUInt32LE(Constants.ENDOFF);
                // zip file comment length
                _commentLength = data.readUInt16LE(Constants.ENDCOM);
            } else {
                // Zip64 EOCD: 64-bit fields, no comment
                // number of entries on this volume
                _volumeEntries = Utils.readBigUInt64LE(data, Constants.ZIP64SUB);
                // total number of entries
                _totalEntries = Utils.readBigUInt64LE(data, Constants.ZIP64TOT);
                // central directory size in bytes
                _size = Utils.readBigUInt64LE(data, Constants.ZIP64SIZ);
                // offset of first CEN header
                _offset = Utils.readBigUInt64LE(data, Constants.ZIP64OFF);

                _commentLength = 0;
            }
        },

        // Serialize as a classic EOCD record; comment space is space-padded,
        // callers overwrite it with the actual comment bytes.
        // NOTE(review): always emits the 32-bit format even if 64-bit values
        // were loaded from a Zip64 record — confirm against callers.
        toBinary: function () {
            var b = Buffer.alloc(Constants.ENDHDR + _commentLength);
            // "PK 05 06" signature
            b.writeUInt32LE(Constants.ENDSIG, 0);
            b.writeUInt32LE(0, 4);
            // number of entries on this volume
            b.writeUInt16LE(_volumeEntries, Constants.ENDSUB);
            // total number of entries
            b.writeUInt16LE(_totalEntries, Constants.ENDTOT);
            // central directory size in bytes
            b.writeUInt32LE(_size, Constants.ENDSIZ);
            // offset of first CEN header
            b.writeUInt32LE(_offset, Constants.ENDOFF);
            // zip file comment length
            b.writeUInt16LE(_commentLength, Constants.ENDCOM);
            // fill comment memory with spaces so no garbage is left there
            b.fill(" ", Constants.ENDHDR);

            return b;
        },

        // Human-readable snapshot; offset rendered as zero-padded hex.
        toJSON: function () {
            // creates 0x0000 style output
            const offset = function (nr, len) {
                let offs = nr.toString(16).toUpperCase();
                while (offs.length < len) offs = "0" + offs;
                return "0x" + offs;
            };

            return {
                diskEntries: _volumeEntries,
                totalEntries: _totalEntries,
                size: _size + " bytes",
                offset: offset(_offset, 4),
                commentLength: _commentLength
            };
        },

        toString: function () {
            return JSON.stringify(this.toJSON(), null, "\t");
        }
    };
};
|
||||||
33
node_modules/adm-zip/methods/deflater.js
generated
vendored
Normal file
33
node_modules/adm-zip/methods/deflater.js
generated
vendored
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
module.exports = function (/*Buffer*/ inbuf) {
|
||||||
|
var zlib = require("zlib");
|
||||||
|
|
||||||
|
var opts = { chunkSize: (parseInt(inbuf.length / 1024) + 1) * 1024 };
|
||||||
|
|
||||||
|
return {
|
||||||
|
deflate: function () {
|
||||||
|
return zlib.deflateRawSync(inbuf, opts);
|
||||||
|
},
|
||||||
|
|
||||||
|
deflateAsync: function (/*Function*/ callback) {
|
||||||
|
var tmp = zlib.createDeflateRaw(opts),
|
||||||
|
parts = [],
|
||||||
|
total = 0;
|
||||||
|
tmp.on("data", function (data) {
|
||||||
|
parts.push(data);
|
||||||
|
total += data.length;
|
||||||
|
});
|
||||||
|
tmp.on("end", function () {
|
||||||
|
var buf = Buffer.alloc(total),
|
||||||
|
written = 0;
|
||||||
|
buf.fill(0);
|
||||||
|
for (var i = 0; i < parts.length; i++) {
|
||||||
|
var part = parts[i];
|
||||||
|
part.copy(buf, written);
|
||||||
|
written += part.length;
|
||||||
|
}
|
||||||
|
callback && callback(buf);
|
||||||
|
});
|
||||||
|
tmp.end(inbuf);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
};
|
||||||
3
node_modules/adm-zip/methods/index.js
generated
vendored
Normal file
3
node_modules/adm-zip/methods/index.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
// Aggregates the compression/encryption method implementations.
const deflater = require("./deflater");
const inflater = require("./inflater");
const zipcrypto = require("./zipcrypto");

exports.Deflater = deflater;
exports.Inflater = inflater;
exports.ZipCrypto = zipcrypto;
|
||||||
31
node_modules/adm-zip/methods/inflater.js
generated
vendored
Normal file
31
node_modules/adm-zip/methods/inflater.js
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
module.exports = function (/*Buffer*/ inbuf) {
|
||||||
|
var zlib = require("zlib");
|
||||||
|
|
||||||
|
return {
|
||||||
|
inflate: function () {
|
||||||
|
return zlib.inflateRawSync(inbuf);
|
||||||
|
},
|
||||||
|
|
||||||
|
inflateAsync: function (/*Function*/ callback) {
|
||||||
|
var tmp = zlib.createInflateRaw(),
|
||||||
|
parts = [],
|
||||||
|
total = 0;
|
||||||
|
tmp.on("data", function (data) {
|
||||||
|
parts.push(data);
|
||||||
|
total += data.length;
|
||||||
|
});
|
||||||
|
tmp.on("end", function () {
|
||||||
|
var buf = Buffer.alloc(total),
|
||||||
|
written = 0;
|
||||||
|
buf.fill(0);
|
||||||
|
for (var i = 0; i < parts.length; i++) {
|
||||||
|
var part = parts[i];
|
||||||
|
part.copy(buf, written);
|
||||||
|
written += part.length;
|
||||||
|
}
|
||||||
|
callback && callback(buf);
|
||||||
|
});
|
||||||
|
tmp.end(inbuf);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
};
|
||||||
170
node_modules/adm-zip/methods/zipcrypto.js
generated
vendored
Normal file
170
node_modules/adm-zip/methods/zipcrypto.js
generated
vendored
Normal file
@@ -0,0 +1,170 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
// node crypt, we use it for generate salt
|
||||||
|
// eslint-disable-next-line node/no-unsupported-features/node-builtins
|
||||||
|
const { randomFillSync } = require("crypto");
|
||||||
|
|
||||||
|
// generate CRC32 lookup table
|
||||||
|
const crctable = new Uint32Array(256).map((t, crc) => {
|
||||||
|
for (let j = 0; j < 8; j++) {
|
||||||
|
if (0 !== (crc & 1)) {
|
||||||
|
crc = (crc >>> 1) ^ 0xedb88320;
|
||||||
|
} else {
|
||||||
|
crc >>>= 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return crc >>> 0;
|
||||||
|
});
|
||||||
|
|
||||||
|
// C-style uInt32 Multiply (discards higher bits, when JS multiply discards lower bits)
|
||||||
|
const uMul = (a, b) => Math.imul(a, b) >>> 0;
|
||||||
|
|
||||||
|
// crc32 byte single update (actually same function is part of utils.crc32 function :) )
|
||||||
|
const crc32update = (pCrc32, bval) => {
|
||||||
|
return crctable[(pCrc32 ^ bval) & 0xff] ^ (pCrc32 >>> 8);
|
||||||
|
};
|
||||||
|
|
||||||
|
// function for generating salt for encrytion header
|
||||||
|
const genSalt = () => {
|
||||||
|
if ("function" === typeof randomFillSync) {
|
||||||
|
return randomFillSync(Buffer.alloc(12));
|
||||||
|
} else {
|
||||||
|
// fallback if function is not defined
|
||||||
|
return genSalt.node();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// salt generation with node random function (mainly as fallback)
|
||||||
|
genSalt.node = () => {
|
||||||
|
const salt = Buffer.alloc(12);
|
||||||
|
const len = salt.length;
|
||||||
|
for (let i = 0; i < len; i++) salt[i] = (Math.random() * 256) & 0xff;
|
||||||
|
return salt;
|
||||||
|
};
|
||||||
|
|
||||||
|
// general config
|
||||||
|
const config = {
|
||||||
|
genSalt
|
||||||
|
};
|
||||||
|
|
||||||
|
// Class Initkeys handles same basic ops with keys
|
||||||
|
function Initkeys(pw) {
|
||||||
|
const pass = Buffer.isBuffer(pw) ? pw : Buffer.from(pw);
|
||||||
|
this.keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]);
|
||||||
|
for (let i = 0; i < pass.length; i++) {
|
||||||
|
this.updateKeys(pass[i]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Initkeys.prototype.updateKeys = function (byteValue) {
|
||||||
|
const keys = this.keys;
|
||||||
|
keys[0] = crc32update(keys[0], byteValue);
|
||||||
|
keys[1] += keys[0] & 0xff;
|
||||||
|
keys[1] = uMul(keys[1], 134775813) + 1;
|
||||||
|
keys[2] = crc32update(keys[2], keys[1] >>> 24);
|
||||||
|
return byteValue;
|
||||||
|
};
|
||||||
|
|
||||||
|
Initkeys.prototype.next = function () {
|
||||||
|
const k = (this.keys[2] | 2) >>> 0; // key
|
||||||
|
return (uMul(k, k ^ 1) >> 8) & 0xff; // decode
|
||||||
|
};
|
||||||
|
|
||||||
|
function make_decrypter(/*Buffer*/ pwd) {
|
||||||
|
// 1. Stage initialize key
|
||||||
|
const keys = new Initkeys(pwd);
|
||||||
|
|
||||||
|
// return decrypter function
|
||||||
|
return function (/*Buffer*/ data) {
|
||||||
|
// result - we create new Buffer for results
|
||||||
|
const result = Buffer.alloc(data.length);
|
||||||
|
let pos = 0;
|
||||||
|
// process input data
|
||||||
|
for (let c of data) {
|
||||||
|
//c ^= keys.next();
|
||||||
|
//result[pos++] = c; // decode & Save Value
|
||||||
|
result[pos++] = keys.updateKeys(c ^ keys.next()); // update keys with decoded byte
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function make_encrypter(/*Buffer*/ pwd) {
|
||||||
|
// 1. Stage initialize key
|
||||||
|
const keys = new Initkeys(pwd);
|
||||||
|
|
||||||
|
// return encrypting function, result and pos is here so we dont have to merge buffers later
|
||||||
|
return function (/*Buffer*/ data, /*Buffer*/ result, /* Number */ pos = 0) {
|
||||||
|
// result - we create new Buffer for results
|
||||||
|
if (!result) result = Buffer.alloc(data.length);
|
||||||
|
// process input data
|
||||||
|
for (let c of data) {
|
||||||
|
const k = keys.next(); // save key byte
|
||||||
|
result[pos++] = c ^ k; // save val
|
||||||
|
keys.updateKeys(c); // update keys with decoded byte
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function decrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd) {
|
||||||
|
if (!data || !Buffer.isBuffer(data) || data.length < 12) {
|
||||||
|
return Buffer.alloc(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 1. We Initialize and generate decrypting function
|
||||||
|
const decrypter = make_decrypter(pwd);
|
||||||
|
|
||||||
|
// 2. decrypt salt what is always 12 bytes and is a part of file content
|
||||||
|
const salt = decrypter(data.slice(0, 12));
|
||||||
|
|
||||||
|
// 3. does password meet expectations
|
||||||
|
if (salt[11] !== header.crc >>> 24) {
|
||||||
|
throw "ADM-ZIP: Wrong Password";
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4. decode content
|
||||||
|
return decrypter(data.slice(12));
|
||||||
|
}
|
||||||
|
|
||||||
|
// lets add way to populate salt, NOT RECOMMENDED for production but maybe useful for testing general functionality
|
||||||
|
function _salter(data) {
|
||||||
|
if (Buffer.isBuffer(data) && data.length >= 12) {
|
||||||
|
// be aware - currently salting buffer data is modified
|
||||||
|
config.genSalt = function () {
|
||||||
|
return data.slice(0, 12);
|
||||||
|
};
|
||||||
|
} else if (data === "node") {
|
||||||
|
// test salt generation with node random function
|
||||||
|
config.genSalt = genSalt.node;
|
||||||
|
} else {
|
||||||
|
// if value is not acceptable config gets reset.
|
||||||
|
config.genSalt = genSalt;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function encrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd, /*Boolean*/ oldlike = false) {
|
||||||
|
// 1. test data if data is not Buffer we make buffer from it
|
||||||
|
if (data == null) data = Buffer.alloc(0);
|
||||||
|
// if data is not buffer be make buffer from it
|
||||||
|
if (!Buffer.isBuffer(data)) data = Buffer.from(data.toString());
|
||||||
|
|
||||||
|
// 2. We Initialize and generate encrypting function
|
||||||
|
const encrypter = make_encrypter(pwd);
|
||||||
|
|
||||||
|
// 3. generate salt (12-bytes of random data)
|
||||||
|
const salt = config.genSalt();
|
||||||
|
salt[11] = (header.crc >>> 24) & 0xff;
|
||||||
|
|
||||||
|
// old implementations (before PKZip 2.04g) used two byte check
|
||||||
|
if (oldlike) salt[10] = (header.crc >>> 16) & 0xff;
|
||||||
|
|
||||||
|
// 4. create output
|
||||||
|
const result = Buffer.alloc(data.length + 12);
|
||||||
|
encrypter(salt, result);
|
||||||
|
|
||||||
|
// finally encode content
|
||||||
|
return encrypter(data, result, 12);
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = { decrypt, encrypt, _salter };
|
||||||
48
node_modules/adm-zip/package.json
generated
vendored
Normal file
48
node_modules/adm-zip/package.json
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
{
|
||||||
|
"name": "adm-zip",
|
||||||
|
"version": "0.5.9",
|
||||||
|
"description": "Javascript implementation of zip for nodejs with support for electron original-fs. Allows user to create or extract zip files both in memory or to/from disk",
|
||||||
|
"scripts": {
|
||||||
|
"test": "mocha -R spec",
|
||||||
|
"test:format": "npm run format:prettier:raw -- --check",
|
||||||
|
"format": "npm run format:prettier",
|
||||||
|
"format:prettier": "npm run format:prettier:raw -- --write",
|
||||||
|
"format:prettier:raw": "prettier \"**/*.{js,yml,json}\""
|
||||||
|
},
|
||||||
|
"keywords": [
|
||||||
|
"zip",
|
||||||
|
"methods",
|
||||||
|
"archive",
|
||||||
|
"unzip"
|
||||||
|
],
|
||||||
|
"homepage": "https://github.com/cthackers/adm-zip",
|
||||||
|
"author": "Nasca Iacob <sy@another-d-mention.ro> (https://github.com/cthackers)",
|
||||||
|
"bugs": {
|
||||||
|
"email": "sy@another-d-mention.ro",
|
||||||
|
"url": "https://github.com/cthackers/adm-zip/issues"
|
||||||
|
},
|
||||||
|
"license": "MIT",
|
||||||
|
"files": [
|
||||||
|
"adm-zip.js",
|
||||||
|
"headers",
|
||||||
|
"methods",
|
||||||
|
"util",
|
||||||
|
"zipEntry.js",
|
||||||
|
"zipFile.js",
|
||||||
|
"LICENSE"
|
||||||
|
],
|
||||||
|
"main": "adm-zip.js",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://github.com/cthackers/adm-zip.git"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"chai": "^4.3.4",
|
||||||
|
"mocha": "^8.3.2",
|
||||||
|
"prettier": "^2.2.1",
|
||||||
|
"rimraf": "^3.0.2"
|
||||||
|
}
|
||||||
|
}
|
||||||
142
node_modules/adm-zip/util/constants.js
generated
vendored
Normal file
142
node_modules/adm-zip/util/constants.js
generated
vendored
Normal file
@@ -0,0 +1,142 @@
|
|||||||
|
module.exports = {
|
||||||
|
/* The local file header */
|
||||||
|
LOCHDR : 30, // LOC header size
|
||||||
|
LOCSIG : 0x04034b50, // "PK\003\004"
|
||||||
|
LOCVER : 4, // version needed to extract
|
||||||
|
LOCFLG : 6, // general purpose bit flag
|
||||||
|
LOCHOW : 8, // compression method
|
||||||
|
LOCTIM : 10, // modification time (2 bytes time, 2 bytes date)
|
||||||
|
LOCCRC : 14, // uncompressed file crc-32 value
|
||||||
|
LOCSIZ : 18, // compressed size
|
||||||
|
LOCLEN : 22, // uncompressed size
|
||||||
|
LOCNAM : 26, // filename length
|
||||||
|
LOCEXT : 28, // extra field length
|
||||||
|
|
||||||
|
/* The Data descriptor */
|
||||||
|
EXTSIG : 0x08074b50, // "PK\007\008"
|
||||||
|
EXTHDR : 16, // EXT header size
|
||||||
|
EXTCRC : 4, // uncompressed file crc-32 value
|
||||||
|
EXTSIZ : 8, // compressed size
|
||||||
|
EXTLEN : 12, // uncompressed size
|
||||||
|
|
||||||
|
/* The central directory file header */
|
||||||
|
CENHDR : 46, // CEN header size
|
||||||
|
CENSIG : 0x02014b50, // "PK\001\002"
|
||||||
|
CENVEM : 4, // version made by
|
||||||
|
CENVER : 6, // version needed to extract
|
||||||
|
CENFLG : 8, // encrypt, decrypt flags
|
||||||
|
CENHOW : 10, // compression method
|
||||||
|
CENTIM : 12, // modification time (2 bytes time, 2 bytes date)
|
||||||
|
CENCRC : 16, // uncompressed file crc-32 value
|
||||||
|
CENSIZ : 20, // compressed size
|
||||||
|
CENLEN : 24, // uncompressed size
|
||||||
|
CENNAM : 28, // filename length
|
||||||
|
CENEXT : 30, // extra field length
|
||||||
|
CENCOM : 32, // file comment length
|
||||||
|
CENDSK : 34, // volume number start
|
||||||
|
CENATT : 36, // internal file attributes
|
||||||
|
CENATX : 38, // external file attributes (host system dependent)
|
||||||
|
CENOFF : 42, // LOC header offset
|
||||||
|
|
||||||
|
/* The entries in the end of central directory */
|
||||||
|
ENDHDR : 22, // END header size
|
||||||
|
ENDSIG : 0x06054b50, // "PK\005\006"
|
||||||
|
ENDSUB : 8, // number of entries on this disk
|
||||||
|
ENDTOT : 10, // total number of entries
|
||||||
|
ENDSIZ : 12, // central directory size in bytes
|
||||||
|
ENDOFF : 16, // offset of first CEN header
|
||||||
|
ENDCOM : 20, // zip file comment length
|
||||||
|
|
||||||
|
END64HDR : 20, // zip64 END header size
|
||||||
|
END64SIG : 0x07064b50, // zip64 Locator signature, "PK\006\007"
|
||||||
|
END64START : 4, // number of the disk with the start of the zip64
|
||||||
|
END64OFF : 8, // relative offset of the zip64 end of central directory
|
||||||
|
END64NUMDISKS : 16, // total number of disks
|
||||||
|
|
||||||
|
ZIP64SIG : 0x06064b50, // zip64 signature, "PK\006\006"
|
||||||
|
ZIP64HDR : 56, // zip64 record minimum size
|
||||||
|
ZIP64LEAD : 12, // leading bytes at the start of the record, not counted by the value stored in ZIP64SIZE
|
||||||
|
ZIP64SIZE : 4, // zip64 size of the central directory record
|
||||||
|
ZIP64VEM : 12, // zip64 version made by
|
||||||
|
ZIP64VER : 14, // zip64 version needed to extract
|
||||||
|
ZIP64DSK : 16, // zip64 number of this disk
|
||||||
|
ZIP64DSKDIR : 20, // number of the disk with the start of the record directory
|
||||||
|
ZIP64SUB : 24, // number of entries on this disk
|
||||||
|
ZIP64TOT : 32, // total number of entries
|
||||||
|
ZIP64SIZB : 40, // zip64 central directory size in bytes
|
||||||
|
ZIP64OFF : 48, // offset of start of central directory with respect to the starting disk number
|
||||||
|
ZIP64EXTRA : 56, // extensible data sector
|
||||||
|
|
||||||
|
/* Compression methods */
|
||||||
|
STORED : 0, // no compression
|
||||||
|
SHRUNK : 1, // shrunk
|
||||||
|
REDUCED1 : 2, // reduced with compression factor 1
|
||||||
|
REDUCED2 : 3, // reduced with compression factor 2
|
||||||
|
REDUCED3 : 4, // reduced with compression factor 3
|
||||||
|
REDUCED4 : 5, // reduced with compression factor 4
|
||||||
|
IMPLODED : 6, // imploded
|
||||||
|
// 7 reserved for Tokenizing compression algorithm
|
||||||
|
DEFLATED : 8, // deflated
|
||||||
|
ENHANCED_DEFLATED: 9, // enhanced deflated
|
||||||
|
PKWARE : 10,// PKWare DCL imploded
|
||||||
|
// 11 reserved by PKWARE
|
||||||
|
BZIP2 : 12, // compressed using BZIP2
|
||||||
|
// 13 reserved by PKWARE
|
||||||
|
LZMA : 14, // LZMA
|
||||||
|
// 15-17 reserved by PKWARE
|
||||||
|
IBM_TERSE : 18, // compressed using IBM TERSE
|
||||||
|
IBM_LZ77 : 19, // IBM LZ77 z
|
||||||
|
AES_ENCRYPT : 99, // WinZIP AES encryption method
|
||||||
|
|
||||||
|
/* General purpose bit flag */
|
||||||
|
// values can obtained with expression 2**bitnr
|
||||||
|
FLG_ENC : 1, // Bit 0: encrypted file
|
||||||
|
FLG_COMP1 : 2, // Bit 1, compression option
|
||||||
|
FLG_COMP2 : 4, // Bit 2, compression option
|
||||||
|
FLG_DESC : 8, // Bit 3, data descriptor
|
||||||
|
FLG_ENH : 16, // Bit 4, enhanced deflating
|
||||||
|
FLG_PATCH : 32, // Bit 5, indicates that the file is compressed patched data.
|
||||||
|
FLG_STR : 64, // Bit 6, strong encryption (patented)
|
||||||
|
// Bits 7-10: Currently unused.
|
||||||
|
FLG_EFS : 2048, // Bit 11: Language encoding flag (EFS)
|
||||||
|
// Bit 12: Reserved by PKWARE for enhanced compression.
|
||||||
|
// Bit 13: encrypted the Central Directory (patented).
|
||||||
|
// Bits 14-15: Reserved by PKWARE.
|
||||||
|
FLG_MSK : 4096, // mask header values
|
||||||
|
|
||||||
|
/* Load type */
|
||||||
|
FILE : 2,
|
||||||
|
BUFFER : 1,
|
||||||
|
NONE : 0,
|
||||||
|
|
||||||
|
/* 4.5 Extensible data fields */
|
||||||
|
EF_ID : 0,
|
||||||
|
EF_SIZE : 2,
|
||||||
|
|
||||||
|
/* Header IDs */
|
||||||
|
ID_ZIP64 : 0x0001,
|
||||||
|
ID_AVINFO : 0x0007,
|
||||||
|
ID_PFS : 0x0008,
|
||||||
|
ID_OS2 : 0x0009,
|
||||||
|
ID_NTFS : 0x000a,
|
||||||
|
ID_OPENVMS : 0x000c,
|
||||||
|
ID_UNIX : 0x000d,
|
||||||
|
ID_FORK : 0x000e,
|
||||||
|
ID_PATCH : 0x000f,
|
||||||
|
ID_X509_PKCS7 : 0x0014,
|
||||||
|
ID_X509_CERTID_F : 0x0015,
|
||||||
|
ID_X509_CERTID_C : 0x0016,
|
||||||
|
ID_STRONGENC : 0x0017,
|
||||||
|
ID_RECORD_MGT : 0x0018,
|
||||||
|
ID_X509_PKCS7_RL : 0x0019,
|
||||||
|
ID_IBM1 : 0x0065,
|
||||||
|
ID_IBM2 : 0x0066,
|
||||||
|
ID_POSZIP : 0x4690,
|
||||||
|
|
||||||
|
EF_ZIP64_OR_32 : 0xffffffff,
|
||||||
|
EF_ZIP64_OR_16 : 0xffff,
|
||||||
|
EF_ZIP64_SUNCOMP : 0,
|
||||||
|
EF_ZIP64_SCOMP : 8,
|
||||||
|
EF_ZIP64_RHO : 16,
|
||||||
|
EF_ZIP64_DSN : 24
|
||||||
|
};
|
||||||
35
node_modules/adm-zip/util/errors.js
generated
vendored
Normal file
35
node_modules/adm-zip/util/errors.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
module.exports = {
|
||||||
|
/* Header error messages */
|
||||||
|
INVALID_LOC: "Invalid LOC header (bad signature)",
|
||||||
|
INVALID_CEN: "Invalid CEN header (bad signature)",
|
||||||
|
INVALID_END: "Invalid END header (bad signature)",
|
||||||
|
|
||||||
|
/* ZipEntry error messages*/
|
||||||
|
NO_DATA: "Nothing to decompress",
|
||||||
|
BAD_CRC: "CRC32 checksum failed",
|
||||||
|
FILE_IN_THE_WAY: "There is a file in the way: %s",
|
||||||
|
UNKNOWN_METHOD: "Invalid/unsupported compression method",
|
||||||
|
|
||||||
|
/* Inflater error messages */
|
||||||
|
AVAIL_DATA: "inflate::Available inflate data did not terminate",
|
||||||
|
INVALID_DISTANCE: "inflate::Invalid literal/length or distance code in fixed or dynamic block",
|
||||||
|
TO_MANY_CODES: "inflate::Dynamic block code description: too many length or distance codes",
|
||||||
|
INVALID_REPEAT_LEN: "inflate::Dynamic block code description: repeat more than specified lengths",
|
||||||
|
INVALID_REPEAT_FIRST: "inflate::Dynamic block code description: repeat lengths with no first length",
|
||||||
|
INCOMPLETE_CODES: "inflate::Dynamic block code description: code lengths codes incomplete",
|
||||||
|
INVALID_DYN_DISTANCE: "inflate::Dynamic block code description: invalid distance code lengths",
|
||||||
|
INVALID_CODES_LEN: "inflate::Dynamic block code description: invalid literal/length code lengths",
|
||||||
|
INVALID_STORE_BLOCK: "inflate::Stored block length did not match one's complement",
|
||||||
|
INVALID_BLOCK_TYPE: "inflate::Invalid block type (type == 3)",
|
||||||
|
|
||||||
|
/* ADM-ZIP error messages */
|
||||||
|
CANT_EXTRACT_FILE: "Could not extract the file",
|
||||||
|
CANT_OVERRIDE: "Target file already exists",
|
||||||
|
NO_ZIP: "No zip file was loaded",
|
||||||
|
NO_ENTRY: "Entry doesn't exist",
|
||||||
|
DIRECTORY_CONTENT_ERROR: "A directory cannot have content",
|
||||||
|
FILE_NOT_FOUND: "File not found: %s",
|
||||||
|
NOT_IMPLEMENTED: "Not implemented",
|
||||||
|
INVALID_FILENAME: "Invalid filename",
|
||||||
|
INVALID_FORMAT: "Invalid or unsupported zip format. No END header found"
|
||||||
|
};
|
||||||
79
node_modules/adm-zip/util/fattr.js
generated
vendored
Normal file
79
node_modules/adm-zip/util/fattr.js
generated
vendored
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
const fs = require("./fileSystem").require();
|
||||||
|
const pth = require("path");
|
||||||
|
|
||||||
|
fs.existsSync = fs.existsSync || pth.existsSync;
|
||||||
|
|
||||||
|
module.exports = function (/*String*/ path) {
|
||||||
|
var _path = path || "",
|
||||||
|
_obj = newAttr(),
|
||||||
|
_stat = null;
|
||||||
|
|
||||||
|
function newAttr() {
|
||||||
|
return {
|
||||||
|
directory: false,
|
||||||
|
readonly: false,
|
||||||
|
hidden: false,
|
||||||
|
executable: false,
|
||||||
|
mtime: 0,
|
||||||
|
atime: 0
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (_path && fs.existsSync(_path)) {
|
||||||
|
_stat = fs.statSync(_path);
|
||||||
|
_obj.directory = _stat.isDirectory();
|
||||||
|
_obj.mtime = _stat.mtime;
|
||||||
|
_obj.atime = _stat.atime;
|
||||||
|
_obj.executable = (0o111 & _stat.mode) !== 0; // file is executable who ever har right not just owner
|
||||||
|
_obj.readonly = (0o200 & _stat.mode) === 0; // readonly if owner has no write right
|
||||||
|
_obj.hidden = pth.basename(_path)[0] === ".";
|
||||||
|
} else {
|
||||||
|
console.warn("Invalid path: " + _path);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
get directory() {
|
||||||
|
return _obj.directory;
|
||||||
|
},
|
||||||
|
|
||||||
|
get readOnly() {
|
||||||
|
return _obj.readonly;
|
||||||
|
},
|
||||||
|
|
||||||
|
get hidden() {
|
||||||
|
return _obj.hidden;
|
||||||
|
},
|
||||||
|
|
||||||
|
get mtime() {
|
||||||
|
return _obj.mtime;
|
||||||
|
},
|
||||||
|
|
||||||
|
get atime() {
|
||||||
|
return _obj.atime;
|
||||||
|
},
|
||||||
|
|
||||||
|
get executable() {
|
||||||
|
return _obj.executable;
|
||||||
|
},
|
||||||
|
|
||||||
|
decodeAttributes: function () {},
|
||||||
|
|
||||||
|
encodeAttributes: function () {},
|
||||||
|
|
||||||
|
toJSON: function () {
|
||||||
|
return {
|
||||||
|
path: _path,
|
||||||
|
isDirectory: _obj.directory,
|
||||||
|
isReadOnly: _obj.readonly,
|
||||||
|
isHidden: _obj.hidden,
|
||||||
|
isExecutable: _obj.executable,
|
||||||
|
mTime: _obj.mtime,
|
||||||
|
aTime: _obj.atime
|
||||||
|
};
|
||||||
|
},
|
||||||
|
|
||||||
|
toString: function () {
|
||||||
|
return JSON.stringify(this.toJSON(), null, "\t");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
};
|
||||||
11
node_modules/adm-zip/util/fileSystem.js
generated
vendored
Normal file
11
node_modules/adm-zip/util/fileSystem.js
generated
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
exports.require = function () {
|
||||||
|
if (typeof process === "object" && process.versions && process.versions["electron"]) {
|
||||||
|
try {
|
||||||
|
const originalFs = require("original-fs");
|
||||||
|
if (Object.keys(originalFs).length > 0) {
|
||||||
|
return originalFs;
|
||||||
|
}
|
||||||
|
} catch (e) {}
|
||||||
|
}
|
||||||
|
return require("fs");
|
||||||
|
};
|
||||||
4
node_modules/adm-zip/util/index.js
generated
vendored
Normal file
4
node_modules/adm-zip/util/index.js
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
module.exports = require("./utils");
|
||||||
|
module.exports.Constants = require("./constants");
|
||||||
|
module.exports.Errors = require("./errors");
|
||||||
|
module.exports.FileAttr = require("./fattr");
|
||||||
246
node_modules/adm-zip/util/utils.js
generated
vendored
Normal file
246
node_modules/adm-zip/util/utils.js
generated
vendored
Normal file
@@ -0,0 +1,246 @@
|
|||||||
|
const fsystem = require("./fileSystem").require();
|
||||||
|
const pth = require("path");
|
||||||
|
const Constants = require("./constants");
|
||||||
|
const isWin = typeof process === "object" && "win32" === process.platform;
|
||||||
|
|
||||||
|
const is_Obj = (obj) => obj && typeof obj === "object";
|
||||||
|
|
||||||
|
// generate CRC32 lookup table
|
||||||
|
const crcTable = new Uint32Array(256).map((t, c) => {
|
||||||
|
for (let k = 0; k < 8; k++) {
|
||||||
|
if ((c & 1) !== 0) {
|
||||||
|
c = 0xedb88320 ^ (c >>> 1);
|
||||||
|
} else {
|
||||||
|
c >>>= 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return c >>> 0;
|
||||||
|
});
|
||||||
|
|
||||||
|
// UTILS functions
|
||||||
|
|
||||||
|
function Utils(opts) {
|
||||||
|
this.sep = pth.sep;
|
||||||
|
this.fs = fsystem;
|
||||||
|
|
||||||
|
if (is_Obj(opts)) {
|
||||||
|
// custom filesystem
|
||||||
|
if (is_Obj(opts.fs) && typeof opts.fs.statSync === "function") {
|
||||||
|
this.fs = opts.fs;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Utils;
|
||||||
|
|
||||||
|
// INSTANCED functions
|
||||||
|
|
||||||
|
Utils.prototype.makeDir = function (/*String*/ folder) {
|
||||||
|
const self = this;
|
||||||
|
|
||||||
|
// Sync - make directories tree
|
||||||
|
function mkdirSync(/*String*/ fpath) {
|
||||||
|
let resolvedPath = fpath.split(self.sep)[0];
|
||||||
|
fpath.split(self.sep).forEach(function (name) {
|
||||||
|
if (!name || name.substr(-1, 1) === ":") return;
|
||||||
|
resolvedPath += self.sep + name;
|
||||||
|
var stat;
|
||||||
|
try {
|
||||||
|
stat = self.fs.statSync(resolvedPath);
|
||||||
|
} catch (e) {
|
||||||
|
self.fs.mkdirSync(resolvedPath);
|
||||||
|
}
|
||||||
|
if (stat && stat.isFile()) throw Errors.FILE_IN_THE_WAY.replace("%s", resolvedPath);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
mkdirSync(folder);
|
||||||
|
};
|
||||||
|
|
||||||
|
Utils.prototype.writeFileTo = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr) {
|
||||||
|
const self = this;
|
||||||
|
if (self.fs.existsSync(path)) {
|
||||||
|
if (!overwrite) return false; // cannot overwrite
|
||||||
|
|
||||||
|
var stat = self.fs.statSync(path);
|
||||||
|
if (stat.isDirectory()) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
var folder = pth.dirname(path);
|
||||||
|
if (!self.fs.existsSync(folder)) {
|
||||||
|
self.makeDir(folder);
|
||||||
|
}
|
||||||
|
|
||||||
|
var fd;
|
||||||
|
try {
|
||||||
|
fd = self.fs.openSync(path, "w", 438); // 0666
|
||||||
|
} catch (e) {
|
||||||
|
self.fs.chmodSync(path, 438);
|
||||||
|
fd = self.fs.openSync(path, "w", 438);
|
||||||
|
}
|
||||||
|
if (fd) {
|
||||||
|
try {
|
||||||
|
self.fs.writeSync(fd, content, 0, content.length, 0);
|
||||||
|
} finally {
|
||||||
|
self.fs.closeSync(fd);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self.fs.chmodSync(path, attr || 438);
|
||||||
|
return true;
|
||||||
|
};
|
||||||
|
|
||||||
|
Utils.prototype.writeFileToAsync = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr, /*Function*/ callback) {
|
||||||
|
if (typeof attr === "function") {
|
||||||
|
callback = attr;
|
||||||
|
attr = undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
const self = this;
|
||||||
|
|
||||||
|
self.fs.exists(path, function (exist) {
|
||||||
|
if (exist && !overwrite) return callback(false);
|
||||||
|
|
||||||
|
self.fs.stat(path, function (err, stat) {
|
||||||
|
if (exist && stat.isDirectory()) {
|
||||||
|
return callback(false);
|
||||||
|
}
|
||||||
|
|
||||||
|
var folder = pth.dirname(path);
|
||||||
|
self.fs.exists(folder, function (exists) {
|
||||||
|
if (!exists) self.makeDir(folder);
|
||||||
|
|
||||||
|
self.fs.open(path, "w", 438, function (err, fd) {
|
||||||
|
if (err) {
|
||||||
|
self.fs.chmod(path, 438, function () {
|
||||||
|
self.fs.open(path, "w", 438, function (err, fd) {
|
||||||
|
self.fs.write(fd, content, 0, content.length, 0, function () {
|
||||||
|
self.fs.close(fd, function () {
|
||||||
|
self.fs.chmod(path, attr || 438, function () {
|
||||||
|
callback(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
} else if (fd) {
|
||||||
|
self.fs.write(fd, content, 0, content.length, 0, function () {
|
||||||
|
self.fs.close(fd, function () {
|
||||||
|
self.fs.chmod(path, attr || 438, function () {
|
||||||
|
callback(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
self.fs.chmod(path, attr || 438, function () {
|
||||||
|
callback(true);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
Utils.prototype.findFiles = function (/*String*/ path) {
|
||||||
|
const self = this;
|
||||||
|
|
||||||
|
function findSync(/*String*/ dir, /*RegExp*/ pattern, /*Boolean*/ recursive) {
|
||||||
|
if (typeof pattern === "boolean") {
|
||||||
|
recursive = pattern;
|
||||||
|
pattern = undefined;
|
||||||
|
}
|
||||||
|
let files = [];
|
||||||
|
self.fs.readdirSync(dir).forEach(function (file) {
|
||||||
|
var path = pth.join(dir, file);
|
||||||
|
|
||||||
|
if (self.fs.statSync(path).isDirectory() && recursive) files = files.concat(findSync(path, pattern, recursive));
|
||||||
|
|
||||||
|
if (!pattern || pattern.test(path)) {
|
||||||
|
files.push(pth.normalize(path) + (self.fs.statSync(path).isDirectory() ? self.sep : ""));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return files;
|
||||||
|
}
|
||||||
|
|
||||||
|
return findSync(path, undefined, true);
|
||||||
|
};
|
||||||
|
|
||||||
|
Utils.prototype.getAttributes = function () {};
|
||||||
|
|
||||||
|
Utils.prototype.setAttributes = function () {};
|
||||||
|
|
||||||
|
// STATIC functions
|
||||||
|
|
||||||
|
// crc32 single update (it is part of crc32)
|
||||||
|
Utils.crc32update = function (crc, byte) {
|
||||||
|
return crcTable[(crc ^ byte) & 0xff] ^ (crc >>> 8);
|
||||||
|
};
|
||||||
|
|
||||||
|
Utils.crc32 = function (buf) {
|
||||||
|
if (typeof buf === "string") {
|
||||||
|
buf = Buffer.from(buf, "utf8");
|
||||||
|
}
|
||||||
|
// Generate crcTable
|
||||||
|
if (!crcTable.length) genCRCTable();
|
||||||
|
|
||||||
|
let len = buf.length;
|
||||||
|
let crc = ~0;
|
||||||
|
for (let off = 0; off < len; ) crc = Utils.crc32update(crc, buf[off++]);
|
||||||
|
// xor and cast as uint32 number
|
||||||
|
return ~crc >>> 0;
|
||||||
|
};
|
||||||
|
|
||||||
|
Utils.methodToString = function (/*Number*/ method) {
|
||||||
|
switch (method) {
|
||||||
|
case Constants.STORED:
|
||||||
|
return "STORED (" + method + ")";
|
||||||
|
case Constants.DEFLATED:
|
||||||
|
return "DEFLATED (" + method + ")";
|
||||||
|
default:
|
||||||
|
return "UNSUPPORTED (" + method + ")";
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// removes ".." style path elements
|
||||||
|
Utils.canonical = function (/*string*/ path) {
|
||||||
|
if (!path) return "";
|
||||||
|
// trick normalize think path is absolute
|
||||||
|
var safeSuffix = pth.posix.normalize("/" + path.split("\\").join("/"));
|
||||||
|
return pth.join(".", safeSuffix);
|
||||||
|
};
|
||||||
|
|
||||||
|
// make abolute paths taking prefix as root folder
|
||||||
|
Utils.sanitize = function (/*string*/ prefix, /*string*/ name) {
|
||||||
|
prefix = pth.resolve(pth.normalize(prefix));
|
||||||
|
var parts = name.split("/");
|
||||||
|
for (var i = 0, l = parts.length; i < l; i++) {
|
||||||
|
var path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep)));
|
||||||
|
if (path.indexOf(prefix) === 0) {
|
||||||
|
return path;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return pth.normalize(pth.join(prefix, pth.basename(name)));
|
||||||
|
};
|
||||||
|
|
||||||
|
// converts buffer, Uint8Array, string types to buffer
|
||||||
|
Utils.toBuffer = function toBuffer(/*buffer, Uint8Array, string*/ input) {
|
||||||
|
if (Buffer.isBuffer(input)) {
|
||||||
|
return input;
|
||||||
|
} else if (input instanceof Uint8Array) {
|
||||||
|
return Buffer.from(input);
|
||||||
|
} else {
|
||||||
|
// expect string all other values are invalid and return empty buffer
|
||||||
|
return typeof input === "string" ? Buffer.from(input, "utf8") : Buffer.alloc(0);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Utils.readBigUInt64LE = function (/*Buffer*/ buffer, /*int*/ index) {
|
||||||
|
var slice = Buffer.from(buffer.slice(index, index + 8));
|
||||||
|
slice.swap64();
|
||||||
|
|
||||||
|
return parseInt(`0x${slice.toString("hex")}`);
|
||||||
|
};
|
||||||
|
|
||||||
|
Utils.isWin = isWin; // Do we have windows system
|
||||||
|
Utils.crcTable = crcTable;
|
||||||
333
node_modules/adm-zip/zipEntry.js
generated
vendored
Normal file
333
node_modules/adm-zip/zipEntry.js
generated
vendored
Normal file
@@ -0,0 +1,333 @@
|
|||||||
|
var Utils = require("./util"),
|
||||||
|
Headers = require("./headers"),
|
||||||
|
Constants = Utils.Constants,
|
||||||
|
Methods = require("./methods");
|
||||||
|
|
||||||
|
module.exports = function (/*Buffer*/ input) {
|
||||||
|
var _entryHeader = new Headers.EntryHeader(),
|
||||||
|
_entryName = Buffer.alloc(0),
|
||||||
|
_comment = Buffer.alloc(0),
|
||||||
|
_isDirectory = false,
|
||||||
|
uncompressedData = null,
|
||||||
|
_extra = Buffer.alloc(0);
|
||||||
|
|
||||||
|
function getCompressedDataFromZip() {
|
||||||
|
if (!input || !Buffer.isBuffer(input)) {
|
||||||
|
return Buffer.alloc(0);
|
||||||
|
}
|
||||||
|
_entryHeader.loadDataHeaderFromBinary(input);
|
||||||
|
return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize);
|
||||||
|
}
|
||||||
|
|
||||||
|
function crc32OK(data) {
|
||||||
|
// if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the header is written
|
||||||
|
if ((_entryHeader.flags & 0x8) !== 0x8) {
|
||||||
|
if (Utils.crc32(data) !== _entryHeader.dataHeader.crc) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// @TODO: load and check data descriptor header
|
||||||
|
// The fields in the local header are filled with zero, and the CRC-32 and size are appended in a 12-byte structure
|
||||||
|
// (optionally preceded by a 4-byte signature) immediately after the compressed data:
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
function decompress(/*Boolean*/ async, /*Function*/ callback, /*String, Buffer*/ pass) {
|
||||||
|
if (typeof callback === "undefined" && typeof async === "string") {
|
||||||
|
pass = async;
|
||||||
|
async = void 0;
|
||||||
|
}
|
||||||
|
if (_isDirectory) {
|
||||||
|
if (async && callback) {
|
||||||
|
callback(Buffer.alloc(0), Utils.Errors.DIRECTORY_CONTENT_ERROR); //si added error.
|
||||||
|
}
|
||||||
|
return Buffer.alloc(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
var compressedData = getCompressedDataFromZip();
|
||||||
|
|
||||||
|
if (compressedData.length === 0) {
|
||||||
|
// File is empty, nothing to decompress.
|
||||||
|
if (async && callback) callback(compressedData);
|
||||||
|
return compressedData;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (_entryHeader.encripted) {
|
||||||
|
if ("string" !== typeof pass && !Buffer.isBuffer(pass)) {
|
||||||
|
throw new Error("ADM-ZIP: Incompatible password parameter");
|
||||||
|
}
|
||||||
|
compressedData = Methods.ZipCrypto.decrypt(compressedData, _entryHeader, pass);
|
||||||
|
}
|
||||||
|
|
||||||
|
var data = Buffer.alloc(_entryHeader.size);
|
||||||
|
|
||||||
|
switch (_entryHeader.method) {
|
||||||
|
case Utils.Constants.STORED:
|
||||||
|
compressedData.copy(data);
|
||||||
|
if (!crc32OK(data)) {
|
||||||
|
if (async && callback) callback(data, Utils.Errors.BAD_CRC); //si added error
|
||||||
|
throw new Error(Utils.Errors.BAD_CRC);
|
||||||
|
} else {
|
||||||
|
//si added otherwise did not seem to return data.
|
||||||
|
if (async && callback) callback(data);
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
case Utils.Constants.DEFLATED:
|
||||||
|
var inflater = new Methods.Inflater(compressedData);
|
||||||
|
if (!async) {
|
||||||
|
const result = inflater.inflate(data);
|
||||||
|
result.copy(data, 0);
|
||||||
|
if (!crc32OK(data)) {
|
||||||
|
throw new Error(Utils.Errors.BAD_CRC + " " + _entryName.toString());
|
||||||
|
}
|
||||||
|
return data;
|
||||||
|
} else {
|
||||||
|
inflater.inflateAsync(function (result) {
|
||||||
|
result.copy(result, 0);
|
||||||
|
if (callback) {
|
||||||
|
if (!crc32OK(result)) {
|
||||||
|
callback(result, Utils.Errors.BAD_CRC); //si added error
|
||||||
|
} else {
|
||||||
|
callback(result);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
if (async && callback) callback(Buffer.alloc(0), Utils.Errors.UNKNOWN_METHOD);
|
||||||
|
throw new Error(Utils.Errors.UNKNOWN_METHOD);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function compress(/*Boolean*/ async, /*Function*/ callback) {
|
||||||
|
if ((!uncompressedData || !uncompressedData.length) && Buffer.isBuffer(input)) {
|
||||||
|
// no data set or the data wasn't changed to require recompression
|
||||||
|
if (async && callback) callback(getCompressedDataFromZip());
|
||||||
|
return getCompressedDataFromZip();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (uncompressedData.length && !_isDirectory) {
|
||||||
|
var compressedData;
|
||||||
|
// Local file header
|
||||||
|
switch (_entryHeader.method) {
|
||||||
|
case Utils.Constants.STORED:
|
||||||
|
_entryHeader.compressedSize = _entryHeader.size;
|
||||||
|
|
||||||
|
compressedData = Buffer.alloc(uncompressedData.length);
|
||||||
|
uncompressedData.copy(compressedData);
|
||||||
|
|
||||||
|
if (async && callback) callback(compressedData);
|
||||||
|
return compressedData;
|
||||||
|
default:
|
||||||
|
case Utils.Constants.DEFLATED:
|
||||||
|
var deflater = new Methods.Deflater(uncompressedData);
|
||||||
|
if (!async) {
|
||||||
|
var deflated = deflater.deflate();
|
||||||
|
_entryHeader.compressedSize = deflated.length;
|
||||||
|
return deflated;
|
||||||
|
} else {
|
||||||
|
deflater.deflateAsync(function (data) {
|
||||||
|
compressedData = Buffer.alloc(data.length);
|
||||||
|
_entryHeader.compressedSize = data.length;
|
||||||
|
data.copy(compressedData);
|
||||||
|
callback && callback(compressedData);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
deflater = null;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
} else if (async && callback) {
|
||||||
|
callback(Buffer.alloc(0));
|
||||||
|
} else {
|
||||||
|
return Buffer.alloc(0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function readUInt64LE(buffer, offset) {
|
||||||
|
return (buffer.readUInt32LE(offset + 4) << 4) + buffer.readUInt32LE(offset);
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseExtra(data) {
|
||||||
|
var offset = 0;
|
||||||
|
var signature, size, part;
|
||||||
|
while (offset < data.length) {
|
||||||
|
signature = data.readUInt16LE(offset);
|
||||||
|
offset += 2;
|
||||||
|
size = data.readUInt16LE(offset);
|
||||||
|
offset += 2;
|
||||||
|
part = data.slice(offset, offset + size);
|
||||||
|
offset += size;
|
||||||
|
if (Constants.ID_ZIP64 === signature) {
|
||||||
|
parseZip64ExtendedInformation(part);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//Override header field values with values from the ZIP64 extra field
|
||||||
|
function parseZip64ExtendedInformation(data) {
|
||||||
|
var size, compressedSize, offset, diskNumStart;
|
||||||
|
|
||||||
|
if (data.length >= Constants.EF_ZIP64_SCOMP) {
|
||||||
|
size = readUInt64LE(data, Constants.EF_ZIP64_SUNCOMP);
|
||||||
|
if (_entryHeader.size === Constants.EF_ZIP64_OR_32) {
|
||||||
|
_entryHeader.size = size;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (data.length >= Constants.EF_ZIP64_RHO) {
|
||||||
|
compressedSize = readUInt64LE(data, Constants.EF_ZIP64_SCOMP);
|
||||||
|
if (_entryHeader.compressedSize === Constants.EF_ZIP64_OR_32) {
|
||||||
|
_entryHeader.compressedSize = compressedSize;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (data.length >= Constants.EF_ZIP64_DSN) {
|
||||||
|
offset = readUInt64LE(data, Constants.EF_ZIP64_RHO);
|
||||||
|
if (_entryHeader.offset === Constants.EF_ZIP64_OR_32) {
|
||||||
|
_entryHeader.offset = offset;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (data.length >= Constants.EF_ZIP64_DSN + 4) {
|
||||||
|
diskNumStart = data.readUInt32LE(Constants.EF_ZIP64_DSN);
|
||||||
|
if (_entryHeader.diskNumStart === Constants.EF_ZIP64_OR_16) {
|
||||||
|
_entryHeader.diskNumStart = diskNumStart;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
get entryName() {
|
||||||
|
return _entryName.toString();
|
||||||
|
},
|
||||||
|
get rawEntryName() {
|
||||||
|
return _entryName;
|
||||||
|
},
|
||||||
|
set entryName(val) {
|
||||||
|
_entryName = Utils.toBuffer(val);
|
||||||
|
var lastChar = _entryName[_entryName.length - 1];
|
||||||
|
_isDirectory = lastChar === 47 || lastChar === 92;
|
||||||
|
_entryHeader.fileNameLength = _entryName.length;
|
||||||
|
},
|
||||||
|
|
||||||
|
get extra() {
|
||||||
|
return _extra;
|
||||||
|
},
|
||||||
|
set extra(val) {
|
||||||
|
_extra = val;
|
||||||
|
_entryHeader.extraLength = val.length;
|
||||||
|
parseExtra(val);
|
||||||
|
},
|
||||||
|
|
||||||
|
get comment() {
|
||||||
|
return _comment.toString();
|
||||||
|
},
|
||||||
|
set comment(val) {
|
||||||
|
_comment = Utils.toBuffer(val);
|
||||||
|
_entryHeader.commentLength = _comment.length;
|
||||||
|
},
|
||||||
|
|
||||||
|
get name() {
|
||||||
|
var n = _entryName.toString();
|
||||||
|
return _isDirectory
|
||||||
|
? n
|
||||||
|
.substr(n.length - 1)
|
||||||
|
.split("/")
|
||||||
|
.pop()
|
||||||
|
: n.split("/").pop();
|
||||||
|
},
|
||||||
|
get isDirectory() {
|
||||||
|
return _isDirectory;
|
||||||
|
},
|
||||||
|
|
||||||
|
getCompressedData: function () {
|
||||||
|
return compress(false, null);
|
||||||
|
},
|
||||||
|
|
||||||
|
getCompressedDataAsync: function (/*Function*/ callback) {
|
||||||
|
compress(true, callback);
|
||||||
|
},
|
||||||
|
|
||||||
|
setData: function (value) {
|
||||||
|
uncompressedData = Utils.toBuffer(value);
|
||||||
|
if (!_isDirectory && uncompressedData.length) {
|
||||||
|
_entryHeader.size = uncompressedData.length;
|
||||||
|
_entryHeader.method = Utils.Constants.DEFLATED;
|
||||||
|
_entryHeader.crc = Utils.crc32(value);
|
||||||
|
_entryHeader.changed = true;
|
||||||
|
} else {
|
||||||
|
// folders and blank files should be stored
|
||||||
|
_entryHeader.method = Utils.Constants.STORED;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
getData: function (pass) {
|
||||||
|
if (_entryHeader.changed) {
|
||||||
|
return uncompressedData;
|
||||||
|
} else {
|
||||||
|
return decompress(false, null, pass);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
getDataAsync: function (/*Function*/ callback, pass) {
|
||||||
|
if (_entryHeader.changed) {
|
||||||
|
callback(uncompressedData);
|
||||||
|
} else {
|
||||||
|
decompress(true, callback, pass);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
set attr(attr) {
|
||||||
|
_entryHeader.attr = attr;
|
||||||
|
},
|
||||||
|
get attr() {
|
||||||
|
return _entryHeader.attr;
|
||||||
|
},
|
||||||
|
|
||||||
|
set header(/*Buffer*/ data) {
|
||||||
|
_entryHeader.loadFromBinary(data);
|
||||||
|
},
|
||||||
|
|
||||||
|
get header() {
|
||||||
|
return _entryHeader;
|
||||||
|
},
|
||||||
|
|
||||||
|
packHeader: function () {
|
||||||
|
// 1. create header (buffer)
|
||||||
|
var header = _entryHeader.entryHeaderToBinary();
|
||||||
|
var addpos = Utils.Constants.CENHDR;
|
||||||
|
// 2. add file name
|
||||||
|
_entryName.copy(header, addpos);
|
||||||
|
addpos += _entryName.length;
|
||||||
|
// 3. add extra data
|
||||||
|
if (_entryHeader.extraLength) {
|
||||||
|
_extra.copy(header, addpos);
|
||||||
|
addpos += _entryHeader.extraLength;
|
||||||
|
}
|
||||||
|
// 4. add file comment
|
||||||
|
if (_entryHeader.commentLength) {
|
||||||
|
_comment.copy(header, addpos);
|
||||||
|
}
|
||||||
|
return header;
|
||||||
|
},
|
||||||
|
|
||||||
|
toJSON: function () {
|
||||||
|
const bytes = function (nr) {
|
||||||
|
return "<" + ((nr && nr.length + " bytes buffer") || "null") + ">";
|
||||||
|
};
|
||||||
|
|
||||||
|
return {
|
||||||
|
entryName: this.entryName,
|
||||||
|
name: this.name,
|
||||||
|
comment: this.comment,
|
||||||
|
isDirectory: this.isDirectory,
|
||||||
|
header: _entryHeader.toJSON(),
|
||||||
|
compressedData: bytes(input),
|
||||||
|
data: bytes(uncompressedData)
|
||||||
|
};
|
||||||
|
},
|
||||||
|
|
||||||
|
toString: function () {
|
||||||
|
return JSON.stringify(this.toJSON(), null, "\t");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
};
|
||||||
384
node_modules/adm-zip/zipFile.js
generated
vendored
Normal file
384
node_modules/adm-zip/zipFile.js
generated
vendored
Normal file
@@ -0,0 +1,384 @@
|
|||||||
|
const ZipEntry = require("./zipEntry");
|
||||||
|
const Headers = require("./headers");
|
||||||
|
const Utils = require("./util");
|
||||||
|
|
||||||
|
module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) {
|
||||||
|
var entryList = [],
|
||||||
|
entryTable = {},
|
||||||
|
_comment = Buffer.alloc(0),
|
||||||
|
mainHeader = new Headers.MainHeader(),
|
||||||
|
loadedEntries = false;
|
||||||
|
|
||||||
|
// assign options
|
||||||
|
const opts = Object.assign(Object.create(null), options);
|
||||||
|
|
||||||
|
const { noSort } = opts;
|
||||||
|
|
||||||
|
if (inBuffer) {
|
||||||
|
// is a memory buffer
|
||||||
|
readMainHeader(opts.readEntries);
|
||||||
|
} else {
|
||||||
|
// none. is a new file
|
||||||
|
loadedEntries = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
function iterateEntries(callback) {
|
||||||
|
const totalEntries = mainHeader.diskEntries; // total number of entries
|
||||||
|
let index = mainHeader.offset; // offset of first CEN header
|
||||||
|
|
||||||
|
for (let i = 0; i < totalEntries; i++) {
|
||||||
|
let tmp = index;
|
||||||
|
const entry = new ZipEntry(inBuffer);
|
||||||
|
|
||||||
|
entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR));
|
||||||
|
entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength));
|
||||||
|
|
||||||
|
index += entry.header.entryHeaderSize;
|
||||||
|
|
||||||
|
callback(entry);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function readEntries() {
|
||||||
|
loadedEntries = true;
|
||||||
|
entryTable = {};
|
||||||
|
entryList = new Array(mainHeader.diskEntries); // total number of entries
|
||||||
|
var index = mainHeader.offset; // offset of first CEN header
|
||||||
|
for (var i = 0; i < entryList.length; i++) {
|
||||||
|
var tmp = index,
|
||||||
|
entry = new ZipEntry(inBuffer);
|
||||||
|
entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR));
|
||||||
|
|
||||||
|
entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength));
|
||||||
|
|
||||||
|
if (entry.header.extraLength) {
|
||||||
|
entry.extra = inBuffer.slice(tmp, (tmp += entry.header.extraLength));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (entry.header.commentLength) entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength);
|
||||||
|
|
||||||
|
index += entry.header.entryHeaderSize;
|
||||||
|
|
||||||
|
entryList[i] = entry;
|
||||||
|
entryTable[entry.entryName] = entry;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function readMainHeader(/*Boolean*/ readNow) {
|
||||||
|
var i = inBuffer.length - Utils.Constants.ENDHDR, // END header size
|
||||||
|
max = Math.max(0, i - 0xffff), // 0xFFFF is the max zip file comment length
|
||||||
|
n = max,
|
||||||
|
endStart = inBuffer.length,
|
||||||
|
endOffset = -1, // Start offset of the END header
|
||||||
|
commentEnd = 0;
|
||||||
|
|
||||||
|
for (i; i >= n; i--) {
|
||||||
|
if (inBuffer[i] !== 0x50) continue; // quick check that the byte is 'P'
|
||||||
|
if (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) {
|
||||||
|
// "PK\005\006"
|
||||||
|
endOffset = i;
|
||||||
|
commentEnd = i;
|
||||||
|
endStart = i + Utils.Constants.ENDHDR;
|
||||||
|
// We already found a regular signature, let's look just a bit further to check if there's any zip64 signature
|
||||||
|
n = i - Utils.Constants.END64HDR;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) {
|
||||||
|
// Found a zip64 signature, let's continue reading the whole zip64 record
|
||||||
|
n = max;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (inBuffer.readUInt32LE(i) === Utils.Constants.ZIP64SIG) {
|
||||||
|
// Found the zip64 record, let's determine it's size
|
||||||
|
endOffset = i;
|
||||||
|
endStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!~endOffset) throw new Error(Utils.Errors.INVALID_FORMAT);
|
||||||
|
|
||||||
|
mainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart));
|
||||||
|
if (mainHeader.commentLength) {
|
||||||
|
_comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR);
|
||||||
|
}
|
||||||
|
if (readNow) readEntries();
|
||||||
|
}
|
||||||
|
|
||||||
|
function sortEntries() {
|
||||||
|
if (entryList.length > 1 && !noSort) {
|
||||||
|
entryList.sort((a, b) => a.entryName.toLowerCase().localeCompare(b.entryName.toLowerCase()));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
/**
|
||||||
|
* Returns an array of ZipEntry objects existent in the current opened archive
|
||||||
|
* @return Array
|
||||||
|
*/
|
||||||
|
get entries() {
|
||||||
|
if (!loadedEntries) {
|
||||||
|
readEntries();
|
||||||
|
}
|
||||||
|
return entryList;
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Archive comment
|
||||||
|
* @return {String}
|
||||||
|
*/
|
||||||
|
get comment() {
|
||||||
|
return _comment.toString();
|
||||||
|
},
|
||||||
|
set comment(val) {
|
||||||
|
_comment = Utils.toBuffer(val);
|
||||||
|
mainHeader.commentLength = _comment.length;
|
||||||
|
},
|
||||||
|
|
||||||
|
getEntryCount: function () {
|
||||||
|
if (!loadedEntries) {
|
||||||
|
return mainHeader.diskEntries;
|
||||||
|
}
|
||||||
|
|
||||||
|
return entryList.length;
|
||||||
|
},
|
||||||
|
|
||||||
|
forEach: function (callback) {
|
||||||
|
if (!loadedEntries) {
|
||||||
|
iterateEntries(callback);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
entryList.forEach(callback);
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a reference to the entry with the given name or null if entry is inexistent
|
||||||
|
*
|
||||||
|
* @param entryName
|
||||||
|
* @return ZipEntry
|
||||||
|
*/
|
||||||
|
getEntry: function (/*String*/ entryName) {
|
||||||
|
if (!loadedEntries) {
|
||||||
|
readEntries();
|
||||||
|
}
|
||||||
|
return entryTable[entryName] || null;
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Adds the given entry to the entry list
|
||||||
|
*
|
||||||
|
* @param entry
|
||||||
|
*/
|
||||||
|
setEntry: function (/*ZipEntry*/ entry) {
|
||||||
|
if (!loadedEntries) {
|
||||||
|
readEntries();
|
||||||
|
}
|
||||||
|
entryList.push(entry);
|
||||||
|
entryTable[entry.entryName] = entry;
|
||||||
|
mainHeader.totalEntries = entryList.length;
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Removes the entry with the given name from the entry list.
|
||||||
|
*
|
||||||
|
* If the entry is a directory, then all nested files and directories will be removed
|
||||||
|
* @param entryName
|
||||||
|
*/
|
||||||
|
deleteEntry: function (/*String*/ entryName) {
|
||||||
|
if (!loadedEntries) {
|
||||||
|
readEntries();
|
||||||
|
}
|
||||||
|
var entry = entryTable[entryName];
|
||||||
|
if (entry && entry.isDirectory) {
|
||||||
|
var _self = this;
|
||||||
|
this.getEntryChildren(entry).forEach(function (child) {
|
||||||
|
if (child.entryName !== entryName) {
|
||||||
|
_self.deleteEntry(child.entryName);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
entryList.splice(entryList.indexOf(entry), 1);
|
||||||
|
delete entryTable[entryName];
|
||||||
|
mainHeader.totalEntries = entryList.length;
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Iterates and returns all nested files and directories of the given entry
|
||||||
|
*
|
||||||
|
* @param entry
|
||||||
|
* @return Array
|
||||||
|
*/
|
||||||
|
getEntryChildren: function (/*ZipEntry*/ entry) {
|
||||||
|
if (!loadedEntries) {
|
||||||
|
readEntries();
|
||||||
|
}
|
||||||
|
if (entry && entry.isDirectory) {
|
||||||
|
const list = [];
|
||||||
|
const name = entry.entryName;
|
||||||
|
const len = name.length;
|
||||||
|
|
||||||
|
entryList.forEach(function (zipEntry) {
|
||||||
|
if (zipEntry.entryName.substr(0, len) === name) {
|
||||||
|
list.push(zipEntry);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return list;
|
||||||
|
}
|
||||||
|
return [];
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the zip file
|
||||||
|
*
|
||||||
|
* @return Buffer
|
||||||
|
*/
|
||||||
|
compressToBuffer: function () {
|
||||||
|
if (!loadedEntries) {
|
||||||
|
readEntries();
|
||||||
|
}
|
||||||
|
sortEntries();
|
||||||
|
|
||||||
|
const dataBlock = [];
|
||||||
|
const entryHeaders = [];
|
||||||
|
let totalSize = 0;
|
||||||
|
let dindex = 0;
|
||||||
|
|
||||||
|
mainHeader.size = 0;
|
||||||
|
mainHeader.offset = 0;
|
||||||
|
|
||||||
|
for (const entry of entryList) {
|
||||||
|
// compress data and set local and entry header accordingly. Reason why is called first
|
||||||
|
const compressedData = entry.getCompressedData();
|
||||||
|
// 1. construct data header
|
||||||
|
entry.header.offset = dindex;
|
||||||
|
const dataHeader = entry.header.dataHeaderToBinary();
|
||||||
|
const entryNameLen = entry.rawEntryName.length;
|
||||||
|
// 1.2. postheader - data after data header
|
||||||
|
const postHeader = Buffer.alloc(entryNameLen + entry.extra.length);
|
||||||
|
entry.rawEntryName.copy(postHeader, 0);
|
||||||
|
postHeader.copy(entry.extra, entryNameLen);
|
||||||
|
|
||||||
|
// 2. offsets
|
||||||
|
const dataLength = dataHeader.length + postHeader.length + compressedData.length;
|
||||||
|
dindex += dataLength;
|
||||||
|
|
||||||
|
// 3. store values in sequence
|
||||||
|
dataBlock.push(dataHeader);
|
||||||
|
dataBlock.push(postHeader);
|
||||||
|
dataBlock.push(compressedData);
|
||||||
|
|
||||||
|
// 4. construct entry header
|
||||||
|
const entryHeader = entry.packHeader();
|
||||||
|
entryHeaders.push(entryHeader);
|
||||||
|
// 5. update main header
|
||||||
|
mainHeader.size += entryHeader.length;
|
||||||
|
totalSize += dataLength + entryHeader.length;
|
||||||
|
}
|
||||||
|
|
||||||
|
totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length
|
||||||
|
// point to end of data and beginning of central directory first record
|
||||||
|
mainHeader.offset = dindex;
|
||||||
|
|
||||||
|
dindex = 0;
|
||||||
|
const outBuffer = Buffer.alloc(totalSize);
|
||||||
|
// write data blocks
|
||||||
|
for (const content of dataBlock) {
|
||||||
|
content.copy(outBuffer, dindex);
|
||||||
|
dindex += content.length;
|
||||||
|
}
|
||||||
|
|
||||||
|
// write central directory entries
|
||||||
|
for (const content of entryHeaders) {
|
||||||
|
content.copy(outBuffer, dindex);
|
||||||
|
dindex += content.length;
|
||||||
|
}
|
||||||
|
|
||||||
|
// write main header
|
||||||
|
const mh = mainHeader.toBinary();
|
||||||
|
if (_comment) {
|
||||||
|
_comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment
|
||||||
|
}
|
||||||
|
mh.copy(outBuffer, dindex);
|
||||||
|
|
||||||
|
return outBuffer;
|
||||||
|
},
|
||||||
|
|
||||||
|
toAsyncBuffer: function (/*Function*/ onSuccess, /*Function*/ onFail, /*Function*/ onItemStart, /*Function*/ onItemEnd) {
|
||||||
|
try {
|
||||||
|
if (!loadedEntries) {
|
||||||
|
readEntries();
|
||||||
|
}
|
||||||
|
sortEntries();
|
||||||
|
|
||||||
|
const dataBlock = [];
|
||||||
|
const entryHeaders = [];
|
||||||
|
let totalSize = 0;
|
||||||
|
let dindex = 0;
|
||||||
|
|
||||||
|
mainHeader.size = 0;
|
||||||
|
mainHeader.offset = 0;
|
||||||
|
|
||||||
|
const compress2Buffer = function (entryLists) {
|
||||||
|
if (entryLists.length) {
|
||||||
|
const entry = entryLists.pop();
|
||||||
|
const name = entry.entryName + entry.extra.toString();
|
||||||
|
if (onItemStart) onItemStart(name);
|
||||||
|
entry.getCompressedDataAsync(function (compressedData) {
|
||||||
|
if (onItemEnd) onItemEnd(name);
|
||||||
|
|
||||||
|
entry.header.offset = dindex;
|
||||||
|
// data header
|
||||||
|
const dataHeader = entry.header.dataHeaderToBinary();
|
||||||
|
const postHeader = Buffer.alloc(name.length, name);
|
||||||
|
const dataLength = dataHeader.length + postHeader.length + compressedData.length;
|
||||||
|
|
||||||
|
dindex += dataLength;
|
||||||
|
|
||||||
|
dataBlock.push(dataHeader);
|
||||||
|
dataBlock.push(postHeader);
|
||||||
|
dataBlock.push(compressedData);
|
||||||
|
|
||||||
|
const entryHeader = entry.packHeader();
|
||||||
|
entryHeaders.push(entryHeader);
|
||||||
|
mainHeader.size += entryHeader.length;
|
||||||
|
totalSize += dataLength + entryHeader.length;
|
||||||
|
|
||||||
|
compress2Buffer(entryLists);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length
|
||||||
|
// point to end of data and beginning of central directory first record
|
||||||
|
mainHeader.offset = dindex;
|
||||||
|
|
||||||
|
dindex = 0;
|
||||||
|
const outBuffer = Buffer.alloc(totalSize);
|
||||||
|
dataBlock.forEach(function (content) {
|
||||||
|
content.copy(outBuffer, dindex); // write data blocks
|
||||||
|
dindex += content.length;
|
||||||
|
});
|
||||||
|
entryHeaders.forEach(function (content) {
|
||||||
|
content.copy(outBuffer, dindex); // write central directory entries
|
||||||
|
dindex += content.length;
|
||||||
|
});
|
||||||
|
|
||||||
|
const mh = mainHeader.toBinary();
|
||||||
|
if (_comment) {
|
||||||
|
_comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment
|
||||||
|
}
|
||||||
|
|
||||||
|
mh.copy(outBuffer, dindex); // write main header
|
||||||
|
|
||||||
|
onSuccess(outBuffer);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
compress2Buffer(entryList);
|
||||||
|
} catch (e) {
|
||||||
|
onFail(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
};
|
||||||
37
package-lock.json
generated
37
package-lock.json
generated
@@ -1,12 +1,12 @@
|
|||||||
{
|
{
|
||||||
"name": "codeql",
|
"name": "codeql",
|
||||||
"version": "2.1.16",
|
"version": "2.1.17",
|
||||||
"lockfileVersion": 2,
|
"lockfileVersion": 2,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
"": {
|
"": {
|
||||||
"name": "codeql",
|
"name": "codeql",
|
||||||
"version": "2.1.16",
|
"version": "2.1.17",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@actions/artifact": "^1.0.0",
|
"@actions/artifact": "^1.0.0",
|
||||||
@@ -18,6 +18,7 @@
|
|||||||
"@chrisgavin/safe-which": "^1.0.2",
|
"@chrisgavin/safe-which": "^1.0.2",
|
||||||
"@octokit/plugin-retry": "^3.0.9",
|
"@octokit/plugin-retry": "^3.0.9",
|
||||||
"@octokit/types": "^6.21.1",
|
"@octokit/types": "^6.21.1",
|
||||||
|
"adm-zip": "^0.5.9",
|
||||||
"commander": "^8.1.0",
|
"commander": "^8.1.0",
|
||||||
"console-log-level": "^1.4.1",
|
"console-log-level": "^1.4.1",
|
||||||
"del": "^6.0.0",
|
"del": "^6.0.0",
|
||||||
@@ -38,6 +39,7 @@
|
|||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@ava/typescript": "3.0.1",
|
"@ava/typescript": "3.0.1",
|
||||||
|
"@types/adm-zip": "^0.5.0",
|
||||||
"@types/js-yaml": "^4.0.5",
|
"@types/js-yaml": "^4.0.5",
|
||||||
"@types/long": "4.0.1",
|
"@types/long": "4.0.1",
|
||||||
"@types/node": "16.11.22",
|
"@types/node": "16.11.22",
|
||||||
@@ -517,6 +519,15 @@
|
|||||||
"integrity": "sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ==",
|
"integrity": "sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
|
"node_modules/@types/adm-zip": {
|
||||||
|
"version": "0.5.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/adm-zip/-/adm-zip-0.5.0.tgz",
|
||||||
|
"integrity": "sha512-FCJBJq9ODsQZUNURo5ILAQueuA8WJhRvuihS3ke2iI25mJlfV2LK8jG2Qj2z2AWg8U0FtWWqBHVRetceLskSaw==",
|
||||||
|
"dev": true,
|
||||||
|
"dependencies": {
|
||||||
|
"@types/node": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@types/color-name": {
|
"node_modules/@types/color-name": {
|
||||||
"version": "1.1.1",
|
"version": "1.1.1",
|
||||||
"integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==",
|
"integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==",
|
||||||
@@ -852,6 +863,14 @@
|
|||||||
"node": ">=0.4.0"
|
"node": ">=0.4.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/adm-zip": {
|
||||||
|
"version": "0.5.9",
|
||||||
|
"resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.9.tgz",
|
||||||
|
"integrity": "sha512-s+3fXLkeeLjZ2kLjCBwQufpI5fuN+kIGBxu6530nVQZGVol0d7Y/M88/xw9HGGUcJjKf8LutN3VPRUBq6N7Ajg==",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/aggregate-error": {
|
"node_modules/aggregate-error": {
|
||||||
"version": "3.0.1",
|
"version": "3.0.1",
|
||||||
"integrity": "sha512-quoaXsZ9/BLNae5yiNoUz+Nhkwz83GhWwtYFglcjEQB2NDHCIpApbqXxIFnm4Pq/Nvhrsq5sYJFyohrrxnTGAA==",
|
"integrity": "sha512-quoaXsZ9/BLNae5yiNoUz+Nhkwz83GhWwtYFglcjEQB2NDHCIpApbqXxIFnm4Pq/Nvhrsq5sYJFyohrrxnTGAA==",
|
||||||
@@ -5887,6 +5906,15 @@
|
|||||||
"integrity": "sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ==",
|
"integrity": "sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
|
"@types/adm-zip": {
|
||||||
|
"version": "0.5.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/adm-zip/-/adm-zip-0.5.0.tgz",
|
||||||
|
"integrity": "sha512-FCJBJq9ODsQZUNURo5ILAQueuA8WJhRvuihS3ke2iI25mJlfV2LK8jG2Qj2z2AWg8U0FtWWqBHVRetceLskSaw==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"@types/node": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
"@types/color-name": {
|
"@types/color-name": {
|
||||||
"version": "1.1.1",
|
"version": "1.1.1",
|
||||||
"integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==",
|
"integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==",
|
||||||
@@ -6096,6 +6124,11 @@
|
|||||||
"integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==",
|
"integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
|
"adm-zip": {
|
||||||
|
"version": "0.5.9",
|
||||||
|
"resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.9.tgz",
|
||||||
|
"integrity": "sha512-s+3fXLkeeLjZ2kLjCBwQufpI5fuN+kIGBxu6530nVQZGVol0d7Y/M88/xw9HGGUcJjKf8LutN3VPRUBq6N7Ajg=="
|
||||||
|
},
|
||||||
"aggregate-error": {
|
"aggregate-error": {
|
||||||
"version": "3.0.1",
|
"version": "3.0.1",
|
||||||
"integrity": "sha512-quoaXsZ9/BLNae5yiNoUz+Nhkwz83GhWwtYFglcjEQB2NDHCIpApbqXxIFnm4Pq/Nvhrsq5sYJFyohrrxnTGAA==",
|
"integrity": "sha512-quoaXsZ9/BLNae5yiNoUz+Nhkwz83GhWwtYFglcjEQB2NDHCIpApbqXxIFnm4Pq/Nvhrsq5sYJFyohrrxnTGAA==",
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "codeql",
|
"name": "codeql",
|
||||||
"version": "2.1.16",
|
"version": "2.1.17",
|
||||||
"private": true,
|
"private": true,
|
||||||
"description": "CodeQL action",
|
"description": "CodeQL action",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
@@ -30,6 +30,7 @@
|
|||||||
"@chrisgavin/safe-which": "^1.0.2",
|
"@chrisgavin/safe-which": "^1.0.2",
|
||||||
"@octokit/plugin-retry": "^3.0.9",
|
"@octokit/plugin-retry": "^3.0.9",
|
||||||
"@octokit/types": "^6.21.1",
|
"@octokit/types": "^6.21.1",
|
||||||
|
"adm-zip": "^0.5.9",
|
||||||
"commander": "^8.1.0",
|
"commander": "^8.1.0",
|
||||||
"console-log-level": "^1.4.1",
|
"console-log-level": "^1.4.1",
|
||||||
"del": "^6.0.0",
|
"del": "^6.0.0",
|
||||||
@@ -53,6 +54,7 @@
|
|||||||
],
|
],
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@ava/typescript": "3.0.1",
|
"@ava/typescript": "3.0.1",
|
||||||
|
"@types/adm-zip": "^0.5.0",
|
||||||
"@types/js-yaml": "^4.0.5",
|
"@types/js-yaml": "^4.0.5",
|
||||||
"@types/long": "4.0.1",
|
"@types/long": "4.0.1",
|
||||||
"@types/node": "16.11.22",
|
"@types/node": "16.11.22",
|
||||||
|
|||||||
170
runner/package-lock.json
generated
170
runner/package-lock.json
generated
@@ -56,6 +56,64 @@
|
|||||||
"node": ">=10.0.0"
|
"node": ">=10.0.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@jridgewell/gen-mapping": {
|
||||||
|
"version": "0.3.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz",
|
||||||
|
"integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==",
|
||||||
|
"dev": true,
|
||||||
|
"dependencies": {
|
||||||
|
"@jridgewell/set-array": "^1.0.1",
|
||||||
|
"@jridgewell/sourcemap-codec": "^1.4.10",
|
||||||
|
"@jridgewell/trace-mapping": "^0.3.9"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@jridgewell/resolve-uri": {
|
||||||
|
"version": "3.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz",
|
||||||
|
"integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==",
|
||||||
|
"dev": true,
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@jridgewell/set-array": {
|
||||||
|
"version": "1.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz",
|
||||||
|
"integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==",
|
||||||
|
"dev": true,
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@jridgewell/source-map": {
|
||||||
|
"version": "0.3.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.2.tgz",
|
||||||
|
"integrity": "sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw==",
|
||||||
|
"dev": true,
|
||||||
|
"dependencies": {
|
||||||
|
"@jridgewell/gen-mapping": "^0.3.0",
|
||||||
|
"@jridgewell/trace-mapping": "^0.3.9"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@jridgewell/sourcemap-codec": {
|
||||||
|
"version": "1.4.14",
|
||||||
|
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz",
|
||||||
|
"integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"node_modules/@jridgewell/trace-mapping": {
|
||||||
|
"version": "0.3.14",
|
||||||
|
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz",
|
||||||
|
"integrity": "sha512-bJWEfQ9lPTvm3SneWwRFVLzrh6nhjwqw7TUFFBEMzwvg7t7PCDenf2lDwqo4NQXzdpgBXyFgDWnQA+2vkruksQ==",
|
||||||
|
"dev": true,
|
||||||
|
"dependencies": {
|
||||||
|
"@jridgewell/resolve-uri": "^3.0.3",
|
||||||
|
"@jridgewell/sourcemap-codec": "^1.4.10"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@nodelib/fs.scandir": {
|
"node_modules/@nodelib/fs.scandir": {
|
||||||
"version": "2.1.5",
|
"version": "2.1.5",
|
||||||
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
|
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
|
||||||
@@ -324,9 +382,9 @@
|
|||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"node_modules/acorn": {
|
"node_modules/acorn": {
|
||||||
"version": "8.4.1",
|
"version": "8.7.1",
|
||||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.4.1.tgz",
|
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz",
|
||||||
"integrity": "sha512-asabaBSkEKosYKMITunzX177CXxQ4Q8BSSzMTKD+FefUhipQC70gfW5SiUDhYQ3vk8G+81HqQk7Fv9OXwwn9KA==",
|
"integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"bin": {
|
"bin": {
|
||||||
"acorn": "bin/acorn"
|
"acorn": "bin/acorn"
|
||||||
@@ -2239,9 +2297,9 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/source-map-support": {
|
"node_modules/source-map-support": {
|
||||||
"version": "0.5.19",
|
"version": "0.5.21",
|
||||||
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz",
|
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz",
|
||||||
"integrity": "sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==",
|
"integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"buffer-from": "^1.0.0",
|
"buffer-from": "^1.0.0",
|
||||||
@@ -2365,14 +2423,15 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/terser": {
|
"node_modules/terser": {
|
||||||
"version": "5.7.1",
|
"version": "5.14.2",
|
||||||
"resolved": "https://registry.npmjs.org/terser/-/terser-5.7.1.tgz",
|
"resolved": "https://registry.npmjs.org/terser/-/terser-5.14.2.tgz",
|
||||||
"integrity": "sha512-b3e+d5JbHAe/JSjwsC3Zn55wsBIM7AsHLjKxT31kGCldgbpFePaFo+PiddtO6uwRZWRw7sPXmAN8dTW61xmnSg==",
|
"integrity": "sha512-oL0rGeM/WFQCUd0y2QrWxYnq7tfSuKBiqTjRPWrRgB46WD/kiwHwF8T23z78H6Q6kGCuuHcPB+KULHRdxvVGQA==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"@jridgewell/source-map": "^0.3.2",
|
||||||
|
"acorn": "^8.5.0",
|
||||||
"commander": "^2.20.0",
|
"commander": "^2.20.0",
|
||||||
"source-map": "~0.7.2",
|
"source-map-support": "~0.5.20"
|
||||||
"source-map-support": "~0.5.19"
|
|
||||||
},
|
},
|
||||||
"bin": {
|
"bin": {
|
||||||
"terser": "bin/terser"
|
"terser": "bin/terser"
|
||||||
@@ -2405,15 +2464,6 @@
|
|||||||
"webpack": "^5.1.0"
|
"webpack": "^5.1.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/terser/node_modules/source-map": {
|
|
||||||
"version": "0.7.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz",
|
|
||||||
"integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==",
|
|
||||||
"dev": true,
|
|
||||||
"engines": {
|
|
||||||
"node": ">= 8"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/to-fast-properties": {
|
"node_modules/to-fast-properties": {
|
||||||
"version": "2.0.0",
|
"version": "2.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz",
|
||||||
@@ -2884,6 +2934,55 @@
|
|||||||
"integrity": "sha512-Fxt+AfXgjMoin2maPIYzFZnQjAXjAL0PHscM5pRTtatFqB+vZxAM9tLp2Optnuw3QOQC40jTNeGYFOMvyf7v9g==",
|
"integrity": "sha512-Fxt+AfXgjMoin2maPIYzFZnQjAXjAL0PHscM5pRTtatFqB+vZxAM9tLp2Optnuw3QOQC40jTNeGYFOMvyf7v9g==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
|
"@jridgewell/gen-mapping": {
|
||||||
|
"version": "0.3.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz",
|
||||||
|
"integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"@jridgewell/set-array": "^1.0.1",
|
||||||
|
"@jridgewell/sourcemap-codec": "^1.4.10",
|
||||||
|
"@jridgewell/trace-mapping": "^0.3.9"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"@jridgewell/resolve-uri": {
|
||||||
|
"version": "3.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz",
|
||||||
|
"integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"@jridgewell/set-array": {
|
||||||
|
"version": "1.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz",
|
||||||
|
"integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"@jridgewell/source-map": {
|
||||||
|
"version": "0.3.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.2.tgz",
|
||||||
|
"integrity": "sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"@jridgewell/gen-mapping": "^0.3.0",
|
||||||
|
"@jridgewell/trace-mapping": "^0.3.9"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"@jridgewell/sourcemap-codec": {
|
||||||
|
"version": "1.4.14",
|
||||||
|
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz",
|
||||||
|
"integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==",
|
||||||
|
"dev": true
|
||||||
|
},
|
||||||
|
"@jridgewell/trace-mapping": {
|
||||||
|
"version": "0.3.14",
|
||||||
|
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz",
|
||||||
|
"integrity": "sha512-bJWEfQ9lPTvm3SneWwRFVLzrh6nhjwqw7TUFFBEMzwvg7t7PCDenf2lDwqo4NQXzdpgBXyFgDWnQA+2vkruksQ==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"@jridgewell/resolve-uri": "^3.0.3",
|
||||||
|
"@jridgewell/sourcemap-codec": "^1.4.10"
|
||||||
|
}
|
||||||
|
},
|
||||||
"@nodelib/fs.scandir": {
|
"@nodelib/fs.scandir": {
|
||||||
"version": "2.1.5",
|
"version": "2.1.5",
|
||||||
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
|
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
|
||||||
@@ -3130,9 +3229,9 @@
|
|||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"acorn": {
|
"acorn": {
|
||||||
"version": "8.4.1",
|
"version": "8.7.1",
|
||||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.4.1.tgz",
|
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz",
|
||||||
"integrity": "sha512-asabaBSkEKosYKMITunzX177CXxQ4Q8BSSzMTKD+FefUhipQC70gfW5SiUDhYQ3vk8G+81HqQk7Fv9OXwwn9KA==",
|
"integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"acorn-import-assertions": {
|
"acorn-import-assertions": {
|
||||||
@@ -4544,9 +4643,9 @@
|
|||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"source-map-support": {
|
"source-map-support": {
|
||||||
"version": "0.5.19",
|
"version": "0.5.21",
|
||||||
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz",
|
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz",
|
||||||
"integrity": "sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==",
|
"integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"requires": {
|
"requires": {
|
||||||
"buffer-from": "^1.0.0",
|
"buffer-from": "^1.0.0",
|
||||||
@@ -4651,22 +4750,15 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"terser": {
|
"terser": {
|
||||||
"version": "5.7.1",
|
"version": "5.14.2",
|
||||||
"resolved": "https://registry.npmjs.org/terser/-/terser-5.7.1.tgz",
|
"resolved": "https://registry.npmjs.org/terser/-/terser-5.14.2.tgz",
|
||||||
"integrity": "sha512-b3e+d5JbHAe/JSjwsC3Zn55wsBIM7AsHLjKxT31kGCldgbpFePaFo+PiddtO6uwRZWRw7sPXmAN8dTW61xmnSg==",
|
"integrity": "sha512-oL0rGeM/WFQCUd0y2QrWxYnq7tfSuKBiqTjRPWrRgB46WD/kiwHwF8T23z78H6Q6kGCuuHcPB+KULHRdxvVGQA==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"requires": {
|
"requires": {
|
||||||
|
"@jridgewell/source-map": "^0.3.2",
|
||||||
|
"acorn": "^8.5.0",
|
||||||
"commander": "^2.20.0",
|
"commander": "^2.20.0",
|
||||||
"source-map": "~0.7.2",
|
"source-map-support": "~0.5.20"
|
||||||
"source-map-support": "~0.5.19"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"source-map": {
|
|
||||||
"version": "0.7.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz",
|
|
||||||
"integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==",
|
|
||||||
"dev": true
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"terser-webpack-plugin": {
|
"terser-webpack-plugin": {
|
||||||
|
|||||||
@@ -753,13 +753,3 @@ test("isAnalyzingDefaultBranch()", async (t) => {
|
|||||||
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
|
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
test("sanitizeArifactName", (t) => {
|
|
||||||
t.deepEqual(actionsutil.sanitizeArifactName("hello-world_"), "hello-world_");
|
|
||||||
t.deepEqual(actionsutil.sanitizeArifactName("hello`world`"), "helloworld");
|
|
||||||
t.deepEqual(actionsutil.sanitizeArifactName("hello===123"), "hello123");
|
|
||||||
t.deepEqual(
|
|
||||||
actionsutil.sanitizeArifactName("*m)a&n^y%i££n+v!a:l[i]d"),
|
|
||||||
"manyinvalid"
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|||||||
@@ -8,9 +8,12 @@ import * as safeWhich from "@chrisgavin/safe-which";
|
|||||||
import * as yaml from "js-yaml";
|
import * as yaml from "js-yaml";
|
||||||
|
|
||||||
import * as api from "./api-client";
|
import * as api from "./api-client";
|
||||||
|
import { Config } from "./config-utils";
|
||||||
import * as sharedEnv from "./shared-environment";
|
import * as sharedEnv from "./shared-environment";
|
||||||
import {
|
import {
|
||||||
|
doesDirectoryExist,
|
||||||
getCachedCodeQlVersion,
|
getCachedCodeQlVersion,
|
||||||
|
getCodeQLDatabasePath,
|
||||||
getRequiredEnvParam,
|
getRequiredEnvParam,
|
||||||
GITHUB_DOTCOM_URL,
|
GITHUB_DOTCOM_URL,
|
||||||
isGitHubGhesVersionBelow,
|
isGitHubGhesVersionBelow,
|
||||||
@@ -872,6 +875,31 @@ export async function isAnalyzingDefaultBranch(): Promise<boolean> {
|
|||||||
return currentRef === defaultBranch;
|
return currentRef === defaultBranch;
|
||||||
}
|
}
|
||||||
|
|
||||||
export function sanitizeArifactName(name: string): string {
|
export async function printDebugLogs(config: Config) {
|
||||||
return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
|
core.info("Debug mode is on. Printing CodeQL debug logs...");
|
||||||
|
for (const language of config.languages) {
|
||||||
|
const databaseDirectory = getCodeQLDatabasePath(config, language);
|
||||||
|
const logsDirectory = path.join(databaseDirectory, "log");
|
||||||
|
if (!doesDirectoryExist(logsDirectory)) {
|
||||||
|
core.info(`Directory ${logsDirectory} does not exist.`);
|
||||||
|
continue; // Skip this language database.
|
||||||
|
}
|
||||||
|
|
||||||
|
const walkLogFiles = (dir: string) => {
|
||||||
|
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
||||||
|
if (entries.length === 0) {
|
||||||
|
core.info(`No debug logs found at directory ${logsDirectory}.`);
|
||||||
|
}
|
||||||
|
for (const entry of entries) {
|
||||||
|
if (entry.isFile()) {
|
||||||
|
core.startGroup(`CodeQL Debug Logs - ${language} - ${entry.name}`);
|
||||||
|
process.stdout.write(fs.readFileSync(path.resolve(dir, entry.name)));
|
||||||
|
core.endGroup();
|
||||||
|
} else if (entry.isDirectory()) {
|
||||||
|
walkLogFiles(path.resolve(dir, entry.name));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
walkLogFiles(logsDirectory);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
1
src/analyze-action-post.test.ts
Normal file
1
src/analyze-action-post.test.ts
Normal file
@@ -0,0 +1 @@
|
|||||||
|
// TODO(angelapwen): Test run() here
|
||||||
42
src/analyze-action-post.ts
Normal file
42
src/analyze-action-post.ts
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
/**
|
||||||
|
* This file is the entry point for the `post:` hook of `analyze-action.yml`.
|
||||||
|
* It will run after the all steps in this job, in reverse order in relation to
|
||||||
|
* other `post:` hooks.
|
||||||
|
*/
|
||||||
|
import * as core from "@actions/core";
|
||||||
|
|
||||||
|
import * as actionsUtil from "./actions-util";
|
||||||
|
import { getConfig } from "./config-utils";
|
||||||
|
import * as debugArtifacts from "./debug-artifacts";
|
||||||
|
import { getActionsLogger } from "./logging";
|
||||||
|
|
||||||
|
async function run(uploadSarifDebugArtifact: Function) {
|
||||||
|
const logger = getActionsLogger();
|
||||||
|
|
||||||
|
const config = await getConfig(actionsUtil.getTemporaryDirectory(), logger);
|
||||||
|
if (config === undefined) {
|
||||||
|
throw new Error(
|
||||||
|
"Config file could not be found at expected location. Did the 'init' action fail to start?"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upload Actions SARIF artifacts for debugging
|
||||||
|
if (config?.debugMode) {
|
||||||
|
core.info(
|
||||||
|
"Debug mode is on. Uploading available SARIF files as Actions debugging artifact..."
|
||||||
|
);
|
||||||
|
const outputDir = actionsUtil.getRequiredInput("output");
|
||||||
|
await uploadSarifDebugArtifact(config, outputDir);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function runWrapper() {
|
||||||
|
try {
|
||||||
|
await run(debugArtifacts.uploadSarifDebugArtifact);
|
||||||
|
} catch (error) {
|
||||||
|
core.setFailed(`analyze action cleanup failed: ${error}`);
|
||||||
|
console.log(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void runWrapper();
|
||||||
@@ -1,7 +1,3 @@
|
|||||||
import * as fs from "fs";
|
|
||||||
import * as path from "path";
|
|
||||||
|
|
||||||
import * as artifact from "@actions/artifact";
|
|
||||||
import * as core from "@actions/core";
|
import * as core from "@actions/core";
|
||||||
|
|
||||||
import * as actionsUtil from "./actions-util";
|
import * as actionsUtil from "./actions-util";
|
||||||
@@ -13,7 +9,7 @@ import {
|
|||||||
runFinalize,
|
runFinalize,
|
||||||
} from "./analyze";
|
} from "./analyze";
|
||||||
import { getGitHubVersionActionsOnly } from "./api-client";
|
import { getGitHubVersionActionsOnly } from "./api-client";
|
||||||
import { CODEQL_VERSION_NEW_TRACING, getCodeQL } from "./codeql";
|
import { getCodeQL } from "./codeql";
|
||||||
import { Config, getConfig } from "./config-utils";
|
import { Config, getConfig } from "./config-utils";
|
||||||
import { uploadDatabases } from "./database-upload";
|
import { uploadDatabases } from "./database-upload";
|
||||||
import { GitHubFeatureFlags } from "./feature-flags";
|
import { GitHubFeatureFlags } from "./feature-flags";
|
||||||
@@ -22,7 +18,6 @@ import { parseRepositoryNwo } from "./repository";
|
|||||||
import * as upload_lib from "./upload-lib";
|
import * as upload_lib from "./upload-lib";
|
||||||
import { UploadResult } from "./upload-lib";
|
import { UploadResult } from "./upload-lib";
|
||||||
import * as util from "./util";
|
import * as util from "./util";
|
||||||
import { bundleDb, codeQlVersionAbove } from "./util";
|
|
||||||
|
|
||||||
// eslint-disable-next-line import/no-commonjs
|
// eslint-disable-next-line import/no-commonjs
|
||||||
const pkg = require("../package.json");
|
const pkg = require("../package.json");
|
||||||
@@ -134,50 +129,6 @@ async function run() {
|
|||||||
config,
|
config,
|
||||||
logger
|
logger
|
||||||
);
|
);
|
||||||
|
|
||||||
if (config.debugMode) {
|
|
||||||
// Upload the SARIF files as an Actions artifact for debugging
|
|
||||||
await uploadDebugArtifacts(
|
|
||||||
config.languages.map((lang) =>
|
|
||||||
path.resolve(outputDir, `${lang}.sarif`)
|
|
||||||
),
|
|
||||||
outputDir,
|
|
||||||
config.debugArtifactName
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const codeql = await getCodeQL(config.codeQLCmd);
|
|
||||||
|
|
||||||
if (config.debugMode) {
|
|
||||||
// Upload the logs as an Actions artifact for debugging
|
|
||||||
let toUpload: string[] = [];
|
|
||||||
for (const language of config.languages) {
|
|
||||||
toUpload = toUpload.concat(
|
|
||||||
listFolder(
|
|
||||||
path.resolve(util.getCodeQLDatabasePath(config, language), "log")
|
|
||||||
)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
if (await codeQlVersionAbove(codeql, CODEQL_VERSION_NEW_TRACING)) {
|
|
||||||
// Multilanguage tracing: there are additional logs in the root of the cluster
|
|
||||||
toUpload = toUpload.concat(
|
|
||||||
listFolder(path.resolve(config.dbLocation, "log"))
|
|
||||||
);
|
|
||||||
}
|
|
||||||
await uploadDebugArtifacts(
|
|
||||||
toUpload,
|
|
||||||
config.dbLocation,
|
|
||||||
config.debugArtifactName
|
|
||||||
);
|
|
||||||
if (!(await codeQlVersionAbove(codeql, CODEQL_VERSION_NEW_TRACING))) {
|
|
||||||
// Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
|
|
||||||
await uploadDebugArtifacts(
|
|
||||||
[path.resolve(config.tempDir, "compound-build-tracer.log")],
|
|
||||||
config.tempDir,
|
|
||||||
config.debugArtifactName
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
|
if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
|
||||||
@@ -237,56 +188,6 @@ async function run() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
return;
|
return;
|
||||||
} finally {
|
|
||||||
if (config?.debugMode) {
|
|
||||||
try {
|
|
||||||
// Upload the database bundles as an Actions artifact for debugging
|
|
||||||
const toUpload: string[] = [];
|
|
||||||
for (const language of config.languages) {
|
|
||||||
toUpload.push(
|
|
||||||
await bundleDb(
|
|
||||||
config,
|
|
||||||
language,
|
|
||||||
await getCodeQL(config.codeQLCmd),
|
|
||||||
`${config.debugDatabaseName}-${language}`
|
|
||||||
)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
await uploadDebugArtifacts(
|
|
||||||
toUpload,
|
|
||||||
config.dbLocation,
|
|
||||||
config.debugArtifactName
|
|
||||||
);
|
|
||||||
} catch (error) {
|
|
||||||
console.log(`Failed to upload database debug bundles: ${error}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (config?.debugMode) {
|
|
||||||
core.info("Debug mode is on. Printing CodeQL debug logs...");
|
|
||||||
for (const language of config.languages) {
|
|
||||||
const databaseDirectory = util.getCodeQLDatabasePath(config, language);
|
|
||||||
const logsDirectory = path.join(databaseDirectory, "log");
|
|
||||||
|
|
||||||
const walkLogFiles = (dir: string) => {
|
|
||||||
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
|
||||||
for (const entry of entries) {
|
|
||||||
if (entry.isFile()) {
|
|
||||||
core.startGroup(
|
|
||||||
`CodeQL Debug Logs - ${language} - ${entry.name}`
|
|
||||||
);
|
|
||||||
process.stdout.write(
|
|
||||||
fs.readFileSync(path.resolve(dir, entry.name))
|
|
||||||
);
|
|
||||||
core.endGroup();
|
|
||||||
} else if (entry.isDirectory()) {
|
|
||||||
walkLogFiles(path.resolve(dir, entry.name));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
walkLogFiles(logsDirectory);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (runStats && uploadResult) {
|
if (runStats && uploadResult) {
|
||||||
@@ -301,37 +202,6 @@ async function run() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function uploadDebugArtifacts(
|
|
||||||
toUpload: string[],
|
|
||||||
rootDir: string,
|
|
||||||
artifactName: string
|
|
||||||
) {
|
|
||||||
let suffix = "";
|
|
||||||
const matrix = actionsUtil.getRequiredInput("matrix");
|
|
||||||
if (matrix !== undefined && matrix !== "null") {
|
|
||||||
for (const entry of Object.entries(JSON.parse(matrix)).sort())
|
|
||||||
suffix += `-${entry[1]}`;
|
|
||||||
}
|
|
||||||
await artifact.create().uploadArtifact(
|
|
||||||
actionsUtil.sanitizeArifactName(`${artifactName}${suffix}`),
|
|
||||||
toUpload.map((file) => path.normalize(file)),
|
|
||||||
path.normalize(rootDir)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
function listFolder(dir: string): string[] {
|
|
||||||
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
|
||||||
let files: string[] = [];
|
|
||||||
for (const entry of entries) {
|
|
||||||
if (entry.isFile()) {
|
|
||||||
files.push(path.resolve(dir, entry.name));
|
|
||||||
} else if (entry.isDirectory()) {
|
|
||||||
files = files.concat(listFolder(path.resolve(dir, entry.name)));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return files;
|
|
||||||
}
|
|
||||||
|
|
||||||
export const runPromise = run();
|
export const runPromise = run();
|
||||||
|
|
||||||
async function runWrapper() {
|
async function runWrapper() {
|
||||||
|
|||||||
@@ -147,7 +147,7 @@ export async function createdDBForScannedLanguages(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function dbIsFinalized(
|
export function dbIsFinalized(
|
||||||
config: configUtils.Config,
|
config: configUtils.Config,
|
||||||
language: Language,
|
language: Language,
|
||||||
logger: Logger
|
logger: Logger
|
||||||
|
|||||||
18
src/debug-artifacts.test.ts
Normal file
18
src/debug-artifacts.test.ts
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
import test from "ava";
|
||||||
|
|
||||||
|
import * as debugArtifacts from "./debug-artifacts";
|
||||||
|
|
||||||
|
test("sanitizeArifactName", (t) => {
|
||||||
|
t.deepEqual(
|
||||||
|
debugArtifacts.sanitizeArifactName("hello-world_"),
|
||||||
|
"hello-world_"
|
||||||
|
);
|
||||||
|
t.deepEqual(debugArtifacts.sanitizeArifactName("hello`world`"), "helloworld");
|
||||||
|
t.deepEqual(debugArtifacts.sanitizeArifactName("hello===123"), "hello123");
|
||||||
|
t.deepEqual(
|
||||||
|
debugArtifacts.sanitizeArifactName("*m)a&n^y%i££n+v!a:l[i]d"),
|
||||||
|
"manyinvalid"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
// TODO(angelapwen): Test uploadDebugArtifacts if toUpload is empty
|
||||||
205
src/debug-artifacts.ts
Normal file
205
src/debug-artifacts.ts
Normal file
@@ -0,0 +1,205 @@
|
|||||||
|
import * as fs from "fs";
|
||||||
|
import * as path from "path";
|
||||||
|
import zlib from "zlib";
|
||||||
|
|
||||||
|
import * as artifact from "@actions/artifact";
|
||||||
|
import * as core from "@actions/core";
|
||||||
|
import AdmZip from "adm-zip";
|
||||||
|
import del from "del";
|
||||||
|
|
||||||
|
import { getRequiredInput } from "./actions-util";
|
||||||
|
import { dbIsFinalized } from "./analyze";
|
||||||
|
import { CODEQL_VERSION_NEW_TRACING, getCodeQL } from "./codeql";
|
||||||
|
import { Config } from "./config-utils";
|
||||||
|
import { Language } from "./languages";
|
||||||
|
import { Logger } from "./logging";
|
||||||
|
import {
|
||||||
|
bundleDb,
|
||||||
|
codeQlVersionAbove,
|
||||||
|
doesDirectoryExist,
|
||||||
|
getCodeQLDatabasePath,
|
||||||
|
listFolder,
|
||||||
|
} from "./util";
|
||||||
|
|
||||||
|
export function sanitizeArifactName(name: string): string {
|
||||||
|
return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function uploadDebugArtifacts(
|
||||||
|
toUpload: string[],
|
||||||
|
rootDir: string,
|
||||||
|
artifactName: string
|
||||||
|
) {
|
||||||
|
if (toUpload.length === 0) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
let suffix = "";
|
||||||
|
const matrix = getRequiredInput("matrix");
|
||||||
|
if (matrix) {
|
||||||
|
try {
|
||||||
|
for (const [, matrixVal] of Object.entries(JSON.parse(matrix)).sort())
|
||||||
|
suffix += `-${matrixVal}`;
|
||||||
|
} catch (e) {
|
||||||
|
core.info(
|
||||||
|
"Could not parse user-specified `matrix` input into JSON. The debug artifact will not be named with the user's `matrix` input."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await artifact.create().uploadArtifact(
|
||||||
|
sanitizeArifactName(`${artifactName}${suffix}`),
|
||||||
|
toUpload.map((file) => path.normalize(file)),
|
||||||
|
path.normalize(rootDir)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function uploadSarifDebugArtifact(
|
||||||
|
config: Config,
|
||||||
|
outputDir: string
|
||||||
|
) {
|
||||||
|
if (!doesDirectoryExist(outputDir)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let toUpload: string[] = [];
|
||||||
|
for (const lang of config.languages) {
|
||||||
|
const sarifFile = path.resolve(outputDir, `${lang}.sarif`);
|
||||||
|
if (fs.existsSync(sarifFile)) {
|
||||||
|
toUpload = toUpload.concat(sarifFile);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await uploadDebugArtifacts(toUpload, outputDir, config.debugArtifactName);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function uploadLogsDebugArtifact(config: Config) {
|
||||||
|
const codeql = await getCodeQL(config.codeQLCmd);
|
||||||
|
|
||||||
|
let toUpload: string[] = [];
|
||||||
|
for (const language of config.languages) {
|
||||||
|
const databaseDirectory = getCodeQLDatabasePath(config, language);
|
||||||
|
const logsDirectory = path.resolve(databaseDirectory, "log");
|
||||||
|
if (doesDirectoryExist(logsDirectory)) {
|
||||||
|
toUpload = toUpload.concat(listFolder(logsDirectory));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (await codeQlVersionAbove(codeql, CODEQL_VERSION_NEW_TRACING)) {
|
||||||
|
// Multilanguage tracing: there are additional logs in the root of the cluster
|
||||||
|
const multiLanguageTracingLogsDirectory = path.resolve(
|
||||||
|
config.dbLocation,
|
||||||
|
"log"
|
||||||
|
);
|
||||||
|
if (doesDirectoryExist(multiLanguageTracingLogsDirectory)) {
|
||||||
|
toUpload = toUpload.concat(listFolder(multiLanguageTracingLogsDirectory));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await uploadDebugArtifacts(
|
||||||
|
toUpload,
|
||||||
|
config.dbLocation,
|
||||||
|
config.debugArtifactName
|
||||||
|
);
|
||||||
|
|
||||||
|
// Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
|
||||||
|
if (!(await codeQlVersionAbove(codeql, CODEQL_VERSION_NEW_TRACING))) {
|
||||||
|
const compoundBuildTracerLogDirectory = path.resolve(
|
||||||
|
config.tempDir,
|
||||||
|
"compound-build-tracer.log"
|
||||||
|
);
|
||||||
|
if (doesDirectoryExist(compoundBuildTracerLogDirectory)) {
|
||||||
|
await uploadDebugArtifacts(
|
||||||
|
[compoundBuildTracerLogDirectory],
|
||||||
|
config.tempDir,
|
||||||
|
config.debugArtifactName
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* If a database has not been finalized, we cannot run the `codeql database bundle`
|
||||||
|
* command in the CLI because it will return an error. Instead we directly zip
|
||||||
|
* all files in the database folder and upload it as an artifact.
|
||||||
|
*/
|
||||||
|
async function uploadPartialDatabaseBundle(config: Config, language: Language) {
|
||||||
|
const databasePath = getCodeQLDatabasePath(config, language);
|
||||||
|
const databaseBundlePath = path.resolve(
|
||||||
|
config.dbLocation,
|
||||||
|
`${config.debugDatabaseName}-${language}-partial.zip`
|
||||||
|
);
|
||||||
|
core.info(
|
||||||
|
`${config.debugDatabaseName}-${language} is not finalized. Uploading partial database bundle at ${databaseBundlePath}...`
|
||||||
|
);
|
||||||
|
// See `bundleDb` for explanation behind deleting existing db bundle.
|
||||||
|
if (fs.existsSync(databaseBundlePath)) {
|
||||||
|
await del(databaseBundlePath, { force: true });
|
||||||
|
}
|
||||||
|
const zip = new AdmZip();
|
||||||
|
zip.addLocalFolder(databasePath);
|
||||||
|
zip.writeZip(databaseBundlePath);
|
||||||
|
await uploadDebugArtifacts(
|
||||||
|
[databaseBundlePath],
|
||||||
|
config.dbLocation,
|
||||||
|
config.debugArtifactName
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function uploadPartialDatabaseBundleZlib(
|
||||||
|
config: Config,
|
||||||
|
language: Language
|
||||||
|
) {
|
||||||
|
const databasePath = getCodeQLDatabasePath(config, language);
|
||||||
|
const databaseBundlePath = path.resolve(
|
||||||
|
config.dbLocation,
|
||||||
|
`${config.debugDatabaseName}-${language}-partial.gz`
|
||||||
|
);
|
||||||
|
core.info(
|
||||||
|
`${config.debugDatabaseName}-${language} is not finalized. Uploading partial database bundle at ${databaseBundlePath}...`
|
||||||
|
);
|
||||||
|
// See `bundleDb` for explanation behind deleting existing db bundle.
|
||||||
|
if (fs.existsSync(databaseBundlePath)) {
|
||||||
|
await del(databaseBundlePath, { force: true });
|
||||||
|
}
|
||||||
|
const gzip = zlib.createGzip();
|
||||||
|
const outputStream = fs.createWriteStream(databaseBundlePath);
|
||||||
|
|
||||||
|
// Write all files in database folder to gz location
|
||||||
|
listFolder(databasePath).map((file) => {
|
||||||
|
const readStream = fs.createReadStream(file);
|
||||||
|
readStream.pipe(gzip).pipe(outputStream);
|
||||||
|
});
|
||||||
|
|
||||||
|
await uploadDebugArtifacts(
|
||||||
|
[databaseBundlePath],
|
||||||
|
config.dbLocation,
|
||||||
|
config.debugArtifactName
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function uploadDatabaseBundleDebugArtifact(
|
||||||
|
config: Config,
|
||||||
|
logger: Logger
|
||||||
|
) {
|
||||||
|
for (const language of config.languages) {
|
||||||
|
if (!dbIsFinalized(config, language, logger)) {
|
||||||
|
await uploadPartialDatabaseBundleZlib(config, language);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
// Otherwise run `codeql database bundle` command.
|
||||||
|
const bundlePath = await bundleDb(
|
||||||
|
config,
|
||||||
|
language,
|
||||||
|
await getCodeQL(config.codeQLCmd),
|
||||||
|
`${config.debugDatabaseName}-${language}`
|
||||||
|
);
|
||||||
|
await uploadDebugArtifacts(
|
||||||
|
[bundlePath],
|
||||||
|
config.dbLocation,
|
||||||
|
config.debugArtifactName
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
core.info(
|
||||||
|
`Failed to upload database debug bundles for ${config.debugDatabaseName}-${language}: ${error}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,3 +1,3 @@
|
|||||||
{
|
{
|
||||||
"bundleVersion": "codeql-bundle-20220623"
|
"bundleVersion": "codeql-bundle-20220714"
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -67,6 +67,37 @@ for (const variant of ALL_FEATURE_FLAGS_DISABLED_VARIANTS) {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
test("API response missing", async (t) => {
|
||||||
|
await withTmpDir(async (tmpDir) => {
|
||||||
|
setupActionsVars(tmpDir, tmpDir);
|
||||||
|
|
||||||
|
const loggedMessages = [];
|
||||||
|
const featureFlags = new GitHubFeatureFlags(
|
||||||
|
{ type: GitHubVariant.DOTCOM },
|
||||||
|
testApiDetails,
|
||||||
|
testRepositoryNwo,
|
||||||
|
getRecordingLogger(loggedMessages)
|
||||||
|
);
|
||||||
|
|
||||||
|
mockFeatureFlagApiEndpoint(403, {});
|
||||||
|
|
||||||
|
for (const flag of Object.values(FeatureFlag)) {
|
||||||
|
t.assert((await featureFlags.getValue(flag)) === false);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const featureFlag of ["ml_powered_queries_enabled"]) {
|
||||||
|
t.assert(
|
||||||
|
loggedMessages.find(
|
||||||
|
(v: LoggedMessage) =>
|
||||||
|
v.type === "debug" &&
|
||||||
|
v.message ===
|
||||||
|
`No feature flags API response for ${featureFlag}, considering it disabled.`
|
||||||
|
) !== undefined
|
||||||
|
);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
test("Feature flags are disabled if they're not returned in API response", async (t) => {
|
test("Feature flags are disabled if they're not returned in API response", async (t) => {
|
||||||
await withTmpDir(async (tmpDir) => {
|
await withTmpDir(async (tmpDir) => {
|
||||||
setupActionsVars(tmpDir, tmpDir);
|
setupActionsVars(tmpDir, tmpDir);
|
||||||
|
|||||||
@@ -31,14 +31,21 @@ export class GitHubFeatureFlags implements FeatureFlags {
|
|||||||
) {}
|
) {}
|
||||||
|
|
||||||
async getValue(flag: FeatureFlag): Promise<boolean> {
|
async getValue(flag: FeatureFlag): Promise<boolean> {
|
||||||
const response = (await this.getApiResponse())[flag];
|
const response = await this.getApiResponse();
|
||||||
if (response === undefined) {
|
if (response === undefined) {
|
||||||
|
this.logger.debug(
|
||||||
|
`No feature flags API response for ${flag}, considering it disabled.`
|
||||||
|
);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
const flagValue = response[flag];
|
||||||
|
if (flagValue === undefined) {
|
||||||
this.logger.debug(
|
this.logger.debug(
|
||||||
`Feature flag '${flag}' undefined in API response, considering it disabled.`
|
`Feature flag '${flag}' undefined in API response, considering it disabled.`
|
||||||
);
|
);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
return response;
|
return flagValue;
|
||||||
}
|
}
|
||||||
|
|
||||||
private async getApiResponse(): Promise<FeatureFlagsApiResponse> {
|
private async getApiResponse(): Promise<FeatureFlagsApiResponse> {
|
||||||
|
|||||||
1
src/init-action-post.test.ts
Normal file
1
src/init-action-post.test.ts
Normal file
@@ -0,0 +1 @@
|
|||||||
|
// TODO(angelapwen): Test run() here.
|
||||||
53
src/init-action-post.ts
Normal file
53
src/init-action-post.ts
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
/**
|
||||||
|
* This file is the entry point for the `post:` hook of `init-action.yml`.
|
||||||
|
* It will run after the all steps in this job, in reverse order in relation to
|
||||||
|
* other `post:` hooks.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import * as core from "@actions/core";
|
||||||
|
|
||||||
|
import * as actionsUtil from "./actions-util";
|
||||||
|
import { getConfig } from "./config-utils";
|
||||||
|
import * as debugArtifacts from "./debug-artifacts";
|
||||||
|
import { getActionsLogger } from "./logging";
|
||||||
|
|
||||||
|
async function run(
|
||||||
|
uploadDatabaseBundleDebugArtifact: Function,
|
||||||
|
uploadLogsDebugArtifact: Function,
|
||||||
|
printDebugLogs: Function
|
||||||
|
) {
|
||||||
|
const logger = getActionsLogger();
|
||||||
|
|
||||||
|
const config = await getConfig(actionsUtil.getTemporaryDirectory(), logger);
|
||||||
|
if (config === undefined) {
|
||||||
|
throw new Error(
|
||||||
|
"Config file could not be found at expected location. Did the 'init' action fail to start?"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upload appropriate Actions artifacts for debugging
|
||||||
|
if (config?.debugMode) {
|
||||||
|
core.info(
|
||||||
|
"Debug mode is on. Uploading available database bundles and logs as Actions debugging artifacts..."
|
||||||
|
);
|
||||||
|
await uploadDatabaseBundleDebugArtifact(config, logger);
|
||||||
|
await uploadLogsDebugArtifact(config);
|
||||||
|
|
||||||
|
await printDebugLogs(config);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function runWrapper() {
|
||||||
|
try {
|
||||||
|
await run(
|
||||||
|
debugArtifacts.uploadDatabaseBundleDebugArtifact,
|
||||||
|
debugArtifacts.uploadLogsDebugArtifact,
|
||||||
|
actionsUtil.printDebugLogs
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
core.setFailed(`init action cleanup failed: ${error}`);
|
||||||
|
console.log(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void runWrapper();
|
||||||
@@ -439,3 +439,15 @@ for (const [
|
|||||||
isActionsStub.restore();
|
isActionsStub.restore();
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TODO(angelapwen): Test doesDirectoryExist() returns true if directory
|
||||||
|
|
||||||
|
// TODO(angelapwen): Test doesDirectoryExist() returns false if file
|
||||||
|
|
||||||
|
// TODO(angelapwen): Test doesDirectoryExist() returns false if no file of this type exists
|
||||||
|
|
||||||
|
// TODO(angelapwen): Test listFolder() returns files in directory
|
||||||
|
|
||||||
|
// TODO(angelapwen): Test listFolder() returns empty if not a directory
|
||||||
|
|
||||||
|
// TODO(angelapwen): Test doesDirectoryExist() returns empty if directory is empty
|
||||||
|
|||||||
28
src/util.ts
28
src/util.ts
@@ -756,3 +756,31 @@ export async function checkActionVersion(version: string) {
|
|||||||
export function isInTestMode(): boolean {
|
export function isInTestMode(): boolean {
|
||||||
return process.env["TEST_MODE"] === "true" || false;
|
return process.env["TEST_MODE"] === "true" || false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Returns whether the path in the argument represents an existing directory.
|
||||||
|
*/
|
||||||
|
export function doesDirectoryExist(dirPath: string): boolean {
|
||||||
|
try {
|
||||||
|
const stats = fs.lstatSync(dirPath);
|
||||||
|
return stats.isDirectory();
|
||||||
|
} catch (e) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a list of files in a given directory.
|
||||||
|
*/
|
||||||
|
export function listFolder(dir: string): string[] {
|
||||||
|
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
||||||
|
let files: string[] = [];
|
||||||
|
for (const entry of entries) {
|
||||||
|
if (entry.isFile()) {
|
||||||
|
files.push(path.resolve(dir, entry.name));
|
||||||
|
} else if (entry.isDirectory()) {
|
||||||
|
files = files.concat(listFolder(path.resolve(dir, entry.name)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return files;
|
||||||
|
}
|
||||||
|
|||||||
Reference in New Issue
Block a user