diff --git a/lib/init-action-cleanup.js b/lib/init-action-cleanup.js index e295c2d2e..67db0a741 100644 --- a/lib/init-action-cleanup.js +++ b/lib/init-action-cleanup.js @@ -18,11 +18,16 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); exports.uploadDatabaseBundleDebugArtifact = void 0; const fs = __importStar(require("fs")); const path = __importStar(require("path")); const core = __importStar(require("@actions/core")); +const adm_zip_1 = __importDefault(require("adm-zip")); +const del_1 = __importDefault(require("del")); const actionsUtil = __importStar(require("./actions-util")); const analyze_1 = require("./analyze"); const codeql_1 = require("./codeql"); @@ -46,7 +51,17 @@ async function uploadDatabaseBundleDebugArtifact(config, logger) { for (const language of config.languages) { if (!(0, analyze_1.dbIsFinalized)(config, language, logger)) { core.info(`${config.debugDatabaseName}-${language} is not finalized. Uploading partial database bundle...`); - // TODO(angelapwen): Zip up files and upload directly. + // Zip up files and upload directly. + const databasePath = (0, util_1.getCodeQLDatabasePath)(config, language); + const databaseBundlePath = path.resolve(config.dbLocation, `${config.debugDatabaseName}.zip`); + // See `bundleDb` for explanation behind deleting existing db bundle. + if (fs.existsSync(databaseBundlePath)) { + await (0, del_1.default)(databaseBundlePath, { force: true }); + } + const zip = new adm_zip_1.default(); + zip.addLocalFolder(databasePath); + zip.writeZip(databaseBundlePath); + await actionsUtil.uploadDebugArtifacts([databaseBundlePath], config.dbLocation, config.debugArtifactName); continue; } try { diff --git a/lib/init-action-cleanup.js.map b/lib/init-action-cleanup.js.map index 38df4c8eb..86b9ea4fb 100644 --- a/lib/init-action-cleanup.js.map +++ b/lib/init-action-cleanup.js.map @@ -1 +1 @@ 
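The compiled hunk above zips a partial CodeQL database with adm-zip, deleting any pre-existing bundle first (for the reason referenced via `bundleDb` in the hunk's comment), and then hands the archive to the debug-artifact upload helper. As a readable companion to that minified output, here is a minimal TypeScript sketch of the same bundling pattern; the function name and the example paths are illustrative placeholders, not identifiers from the action, and it assumes the project's `esModuleInterop`-style default imports seen in the compiled output.

```typescript
// Minimal sketch (not the action's source) of the bundling pattern introduced above.
// The paths passed in are illustrative placeholders.
import * as fs from "fs";
import AdmZip from "adm-zip";
import del from "del";

async function bundleDatabaseFolder(
  databasePath: string, // e.g. "codeql-db/javascript" (placeholder)
  bundlePath: string // e.g. "codeql-db/my-db-javascript.zip" (placeholder)
): Promise<string> {
  // Remove a stale bundle first, as the hunk does (see the `bundleDb` comment above).
  if (fs.existsSync(bundlePath)) {
    await del(bundlePath, { force: true });
  }
  const zip = new AdmZip();
  zip.addLocalFolder(databasePath); // add the whole database directory to the archive
  zip.writeZip(bundlePath); // write the .zip to disk
  return bundlePath; // ready to pass to an artifact-upload helper
}

// Illustrative usage:
// const artifact = await bundleDatabaseFolder(
//   "codeql-db/javascript",
//   "codeql-db/my-db-javascript.zip"
// );
```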
-{"version":3,"file":"init-action-cleanup.js","sourceRoot":"","sources":["../src/init-action-cleanup.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA0C;AAC1C,qCAAiE;AACjE,iDAAmD;AACnD,uCAAqD;AACrD,iCAA6E;AAE7E,SAAS,UAAU,CAAC,GAAW;IAC7B,MAAM,OAAO,GAAG,EAAE,CAAC,WAAW,CAAC,GAAG,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;IAC7D,IAAI,KAAK,GAAa,EAAE,CAAC;IACzB,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE;QAC3B,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;YAClB,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC;SAC3C;aAAM,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE;YAC9B,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;SACjE;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAEM,KAAK,UAAU,iCAAiC,CACrD,MAAc,EACd,MAAc;IAEd,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,CAAC,IAAA,uBAAa,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,CAAC,EAAE;YAC5C,IAAI,CAAC,IAAI,CACP,GAAG,MAAM,CAAC,iBAAiB,IAAI,QAAQ,yDAAyD,CACjG,CAAC;YACF,sDAAsD;YACtD,SAAS;SACV;QACD,IAAI;YACF,kDAAkD;YAClD,MAAM,QAAQ,GAAa,EAAE,CAAC;YAC9B,QAAQ,CAAC,IAAI,CACX,MAAM,IAAA,eAAQ,EACZ,MAAM,EACN,QAAQ,EACR,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,EACjC,GAAG,MAAM,CAAC,iBAAiB,IAAI,QAAQ,EAAE,CAC1C,CACF,CAAC;YACF,MAAM,WAAW,CAAC,oBAAoB,CACpC,QAAQ,EACR,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,iBAAiB,CACzB,CAAC;SACH;QAAC,OAAO,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CACP,+CAA+C,MAAM,CAAC,iBAAiB,IAAI,QAAQ,KAAK,KAAK,EAAE,CAChG,CAAC;SACH;KACF;AACH,CAAC;AAlCD,8EAkCC;AAED,KAAK,UAAU,uBAAuB,CAAC,MAAc;IACnD,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAEjD,IAAI,QAAQ,GAAa,EAAE,CAAC;IAC5B,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,iBAAiB,GAAG,IAAA,4BAAqB,EAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;QAClE,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;QAC7D,IAAI,WAAW,CAAC,kBAAkB,CAAC,aAAa,CAAC,EAAE;YACjD,QAAQ,GAAG,QAAQ,CAAC,MAAM,CAAC,UAAU,CAAC,aAAa,CAAC,CAAC,CAAC;SACvD;KACF;IAED,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;QAChE,8EAA8E;QAC9E,MAAM,iCAAiC,GAAG,IAAI,CAAC,OAAO,CACpD,MAAM,CAAC,UAAU,EACjB,KAAK,CACN,CAAC;QACF,IAAI,WAAW,CAAC,kBAAkB,CAAC,iCAAiC,CAAC,EAAE;YACrE,QAAQ,GAAG,QAAQ,CAAC,MAAM,CAAC,UAAU,CAAC,iCAAiC,CAAC,CAAC,CAAC;SAC3E;KACF;IACD,MAAM,WAAW,CAAC,oBAAoB,CACpC,QAAQ,EACR,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,iBAAiB,CACzB,CAAC;IAEF,sFAAsF;IACtF,IAAI,CAAC,CAAC,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,CAAC,EAAE;QACnE,MAAM,+BAA+B,GAAG,IAAI,CAAC,OAAO,CAClD,MAAM,CAAC,OAAO,EACd,2BAA2B,CAC5B,CAAC;QACF,IAAI,WAAW,CAAC,kBAAkB,CAAC,+BAA+B,CAAC,EAAE;YACnE,MAAM,WAAW,CAAC,oBAAoB,CACpC,CAAC,+BAA+B,CAAC,EACjC,MAAM,CAAC,OAAO,EACd,MAAM,CAAC,iBAAiB,CACzB,CAAC;SACH;KACF;AACH,CAAC;AAED,KAAK,UAAU,4BAA4B,CAAC,MAAc;IACxD,IAAI,CAAC,IAAI,CAAC,iDAAiD,CAAC,CAAC;IAC7D,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,iBAAiB,GAAG,IAAA,4BAAqB,EAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;QAClE,MAAM,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;QAC1D,IAAI,CAAC,WAAW,CAAC,kBAAkB,CAAC,aAAa,CAAC,EAAE;YAClD,IAAI,CAAC,IAAI,CAAC,aAAa,aAAa,kBAAkB,CAAC,CAAC;YACxD,SAAS,CAAC,+BAA+B;SAC1C;QAED,MAAM,YAAY,GAAG,CAAC,GAAW,EAAE,EAAE;YACnC,MAAM,OAAO,GAAG,EAAE,CAAC,WAAW,CAAC,GAAG,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;YAC7D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;gBACxB,IAAI,CAAC,IAAI,CAAC,oCAAoC,aAAa,GAAG,CAAC,CAAC;aACjE;YACD,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE;gBAC3B,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;oBAClB,IAAI,CAAC,UAAU,CAAC,uBAAuB,QAAQ,MAAM,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC;oBACnE,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;oBACrE,IAAI,CAAC,QAAQ,EAAE,CAAC;iBACjB;qBAAM,IAA
I,KAAK,CAAC,WAAW,EAAE,EAAE;oBAC9B,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC;iBAC7C;aACF;QACH,CAAC,CAAC;QACF,YAAY,CAAC,aAAa,CAAC,CAAC;KAC7B;AACH,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,IAAI,MAAM,GAAuB,SAAS,CAAC;IAC3C,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IACtE,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,SAAS,EAAE;QACrB,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QACtC,MAAM,4BAA4B,CAAC,MAAM,CAAC,CAAC;KAC5C;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,+BAA+B,KAAK,EAAE,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"} \ No newline at end of file +{"version":3,"file":"init-action-cleanup.js","sourceRoot":"","sources":["../src/init-action-cleanup.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,oDAAsC;AACtC,sDAA6B;AAC7B,8CAAsB;AAEtB,4DAA8C;AAC9C,uCAA0C;AAC1C,qCAAiE;AACjE,iDAAmD;AACnD,uCAAqD;AACrD,iCAA6E;AAE7E,SAAS,UAAU,CAAC,GAAW;IAC7B,MAAM,OAAO,GAAG,EAAE,CAAC,WAAW,CAAC,GAAG,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;IAC7D,IAAI,KAAK,GAAa,EAAE,CAAC;IACzB,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE;QAC3B,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;YAClB,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC;SAC3C;aAAM,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE;YAC9B,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;SACjE;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAEM,KAAK,UAAU,iCAAiC,CACrD,MAAc,EACd,MAAc;IAEd,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,CAAC,IAAA,uBAAa,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,CAAC,EAAE;YAC5C,IAAI,CAAC,IAAI,CACP,GAAG,MAAM,CAAC,iBAAiB,IAAI,QAAQ,yDAAyD,CACjG,CAAC;YACF,oCAAoC;YACpC,MAAM,YAAY,GAAG,IAAA,4BAAqB,EAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;YAC7D,MAAM,kBAAkB,GAAG,IAAI,CAAC,OAAO,CACrC,MAAM,CAAC,UAAU,EACjB,GAAG,MAAM,CAAC,iBAAiB,MAAM,CAClC,CAAC;YACF,qEAAqE;YACrE,IAAI,EAAE,CAAC,UAAU,CAAC,kBAAkB,CAAC,EAAE;gBACrC,MAAM,IAAA,aAAG,EAAC,kBAAkB,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;aAChD;YACD,MAAM,GAAG,GAAG,IAAI,iBAAM,EAAE,CAAC;YACzB,GAAG,CAAC,cAAc,CAAC,YAAY,CAAC,CAAC;YACjC,GAAG,CAAC,QAAQ,CAAC,kBAAkB,CAAC,CAAC;YACjC,MAAM,WAAW,CAAC,oBAAoB,CACpC,CAAC,kBAAkB,CAAC,EACpB,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,iBAAiB,CACzB,CAAC;YACF,SAAS;SACV;QACD,IAAI;YACF,kDAAkD;YAClD,MAAM,QAAQ,GAAa,EAAE,CAAC;YAC9B,QAAQ,CAAC,IAAI,CACX,MAAM,IAAA,eAAQ,EACZ,MAAM,EACN,QAAQ,EACR,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,EACjC,GAAG,MAAM,CAAC,iBAAiB,IAAI,QAAQ,EAAE,CAC1C,CACF,CAAC;YACF,MAAM,WAAW,CAAC,oBAAoB,CACpC,QAAQ,EACR,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,iBAAiB,CACzB,CAAC;SACH;QAAC,OAAO,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CACP,+CAA+C,MAAM,CAAC,iBAAiB,IAAI,QAAQ,KAAK,KAAK,EAAE,CAChG,CAAC;SACH;KACF;AACH,CAAC;AAnDD,8EAmDC;AAED,KAAK,UAAU,uBAAuB,CAAC,MAAc;IACnD,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAEjD,IAAI,QAAQ,GAAa,EAAE,CAAC;IAC5B,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,iBAAiB,GAAG,IAAA,4BAAqB,EAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;QAClE,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;QAC7D,IAAI,WAAW,CAAC,kBAAkB,CAAC,aAAa,CAAC,EAAE;YACjD,QAAQ,GAAG,QAAQ,CAAC,MAAM,CAAC,UAAU,CAAC,aAAa,CAAC,CAAC,CAAC;SACvD;KACF;IAED,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;QAChE,8EAA8E;QAC9E,MAAM,iCAAiC,GAAG,IAAI,CAAC,OAAO,CACpD,MAAM,CAAC,UAAU,EACjB,KAAK,CACN,CAAC;QACF,IAAI,WAAW,CAAC,kBAAkB,CAAC,iCAAiC,CAAC,EAAE;Y
ACrE,QAAQ,GAAG,QAAQ,CAAC,MAAM,CAAC,UAAU,CAAC,iCAAiC,CAAC,CAAC,CAAC;SAC3E;KACF;IACD,MAAM,WAAW,CAAC,oBAAoB,CACpC,QAAQ,EACR,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,iBAAiB,CACzB,CAAC;IAEF,sFAAsF;IACtF,IAAI,CAAC,CAAC,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,CAAC,EAAE;QACnE,MAAM,+BAA+B,GAAG,IAAI,CAAC,OAAO,CAClD,MAAM,CAAC,OAAO,EACd,2BAA2B,CAC5B,CAAC;QACF,IAAI,WAAW,CAAC,kBAAkB,CAAC,+BAA+B,CAAC,EAAE;YACnE,MAAM,WAAW,CAAC,oBAAoB,CACpC,CAAC,+BAA+B,CAAC,EACjC,MAAM,CAAC,OAAO,EACd,MAAM,CAAC,iBAAiB,CACzB,CAAC;SACH;KACF;AACH,CAAC;AAED,KAAK,UAAU,4BAA4B,CAAC,MAAc;IACxD,IAAI,CAAC,IAAI,CAAC,iDAAiD,CAAC,CAAC;IAC7D,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,iBAAiB,GAAG,IAAA,4BAAqB,EAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;QAClE,MAAM,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;QAC1D,IAAI,CAAC,WAAW,CAAC,kBAAkB,CAAC,aAAa,CAAC,EAAE;YAClD,IAAI,CAAC,IAAI,CAAC,aAAa,aAAa,kBAAkB,CAAC,CAAC;YACxD,SAAS,CAAC,+BAA+B;SAC1C;QAED,MAAM,YAAY,GAAG,CAAC,GAAW,EAAE,EAAE;YACnC,MAAM,OAAO,GAAG,EAAE,CAAC,WAAW,CAAC,GAAG,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;YAC7D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;gBACxB,IAAI,CAAC,IAAI,CAAC,oCAAoC,aAAa,GAAG,CAAC,CAAC;aACjE;YACD,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE;gBAC3B,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;oBAClB,IAAI,CAAC,UAAU,CAAC,uBAAuB,QAAQ,MAAM,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC;oBACnE,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;oBACrE,IAAI,CAAC,QAAQ,EAAE,CAAC;iBACjB;qBAAM,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE;oBAC9B,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC;iBAC7C;aACF;QACH,CAAC,CAAC;QACF,YAAY,CAAC,aAAa,CAAC,CAAC;KAC7B;AACH,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,IAAI,MAAM,GAAuB,SAAS,CAAC;IAC3C,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IACtE,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,SAAS,EAAE;QACrB,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QACtC,MAAM,4BAA4B,CAAC,MAAM,CAAC,CAAC;KAC5C;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,+BAA+B,KAAK,EAAE,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"} \ No newline at end of file diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json index eaa33ca13..ef4118da2 100644 --- a/node_modules/.package-lock.json +++ b/node_modules/.package-lock.json @@ -464,6 +464,15 @@ "integrity": "sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ==", "dev": true }, + "node_modules/@types/adm-zip": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/@types/adm-zip/-/adm-zip-0.5.0.tgz", + "integrity": "sha512-FCJBJq9ODsQZUNURo5ILAQueuA8WJhRvuihS3ke2iI25mJlfV2LK8jG2Qj2z2AWg8U0FtWWqBHVRetceLskSaw==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/color-name": { "version": "1.1.1", "integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==", @@ -799,6 +808,14 @@ "node": ">=0.4.0" } }, + "node_modules/adm-zip": { + "version": "0.5.9", + "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.9.tgz", + "integrity": "sha512-s+3fXLkeeLjZ2kLjCBwQufpI5fuN+kIGBxu6530nVQZGVol0d7Y/M88/xw9HGGUcJjKf8LutN3VPRUBq6N7Ajg==", + "engines": { + "node": ">=6.0" + } + }, "node_modules/aggregate-error": { "version": "3.0.1", "integrity": 
"sha512-quoaXsZ9/BLNae5yiNoUz+Nhkwz83GhWwtYFglcjEQB2NDHCIpApbqXxIFnm4Pq/Nvhrsq5sYJFyohrrxnTGAA==", diff --git a/node_modules/@types/adm-zip/LICENSE b/node_modules/@types/adm-zip/LICENSE new file mode 100755 index 000000000..9e841e7a2 --- /dev/null +++ b/node_modules/@types/adm-zip/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/adm-zip/README.md b/node_modules/@types/adm-zip/README.md new file mode 100755 index 000000000..0234987d9 --- /dev/null +++ b/node_modules/@types/adm-zip/README.md @@ -0,0 +1,16 @@ +# Installation +> `npm install --save @types/adm-zip` + +# Summary +This package contains type definitions for adm-zip (https://github.com/cthackers/adm-zip). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/adm-zip. + +### Additional Details + * Last updated: Fri, 01 Apr 2022 08:01:43 GMT + * Dependencies: [@types/node](https://npmjs.com/package/@types/node) + * Global values: none + +# Credits +These definitions were written by [John Vilk](https://github.com/jvilk), [Abner Oliveira](https://github.com/abner), [BendingBender](https://github.com/BendingBender), [Matthew Sainsbury](https://github.com/mattsains), and [Lei Nelissen](https://github.com/LeiNelissen). diff --git a/node_modules/@types/adm-zip/index.d.ts b/node_modules/@types/adm-zip/index.d.ts new file mode 100755 index 000000000..7150db0f0 --- /dev/null +++ b/node_modules/@types/adm-zip/index.d.ts @@ -0,0 +1,380 @@ +// Type definitions for adm-zip 0.5 +// Project: https://github.com/cthackers/adm-zip +// Definitions by: John Vilk +// Abner Oliveira +// BendingBender +// Matthew Sainsbury +// Lei Nelissen +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped + +/// + +import * as FS from 'fs'; +import { Constants } from './util'; + +declare class AdmZip { + /** + * @param fileNameOrRawData If provided, reads an existing archive. Otherwise creates a new, empty archive. 
+ * @param options Options when initializing the ZIP file + */ + constructor(fileNameOrRawData?: string | Buffer, options?: Partial); + /** + * Extracts the given entry from the archive and returns the content as a Buffer object + * @param entry ZipEntry object or String with the full path of the entry + * @param pass Password used for decrypting the file + * @return Buffer or Null in case of error + */ + readFile(entry: string | AdmZip.IZipEntry, pass?: string | Buffer): Buffer | null; + /** + * Asynchronous `readFile`. + * @param entry The full path of the entry or a `IZipEntry` object. + * @param callback Called with a `Buffer` or `null` in case of error. + */ + readFileAsync(entry: string | AdmZip.IZipEntry, callback: (data: Buffer | null, err: string) => void): void; + /** + * Extracts the given entry from the archive and returns the content as + * plain text in the given encoding. + * @param entry The full path of the entry or a `IZipEntry` object. + * @param encoding If no encoding is specified `"utf8"` is used. + */ + readAsText(fileName: string | AdmZip.IZipEntry, encoding?: string): string; + /** + * Asynchronous `readAsText`. + * @param entry The full path of the entry or a `IZipEntry` object. + * @param callback Called with the resulting string. + * @param encoding If no encoding is specified `"utf8"` is used. + */ + readAsTextAsync( + fileName: string | AdmZip.IZipEntry, + callback: (data: string, err: string) => void, + encoding?: string + ): void; + /** + * Remove the entry from the file or the entry and all its nested directories + * and files if the given entry is a directory. + * @param entry The full path of the entry or a `IZipEntry` object. + */ + deleteFile(entry: string | AdmZip.IZipEntry): void; + /** + * Adds a comment to the zip. The zip must be rewritten after + * adding the comment. + * @param comment Content of the comment. + */ + addZipComment(comment: string): void; + /** + * @return The zip comment. + */ + getZipComment(): string; + /** + * Adds a comment to a specified file or `IZipEntry`. The zip must be rewritten after + * adding the comment. + * The comment cannot exceed 65535 characters in length. + * @param entry The full path of the entry or a `IZipEntry` object. + * @param comment The comment to add to the entry. + */ + addZipEntryComment(entry: string | AdmZip.IZipEntry, comment: string): void; + /** + * Returns the comment of the specified entry. + * @param entry The full path of the entry or a `IZipEntry` object. + * @return The comment of the specified entry. + */ + getZipEntryComment(entry: string | AdmZip.IZipEntry): string; + /** + * Updates the content of an existing entry inside the archive. The zip + * must be rewritten after updating the content. + * @param entry The full path of the entry or a `IZipEntry` object. + * @param content The entry's new contents. + */ + updateFile(entry: string | AdmZip.IZipEntry, content: Buffer): void; + /** + * Adds a file from the disk to the archive. + * @param localPath Path to a file on disk. + * @param zipPath Path to a directory in the archive. Defaults to the empty + * string. + * @param zipName Name for the file. + * @param comment Comment to be attached to the file + */ + addLocalFile(localPath: string, zipPath?: string, zipName?: string, comment?: string): void; + /** + * Adds a local directory and all its nested files and directories to the + * archive. + * @param localPath Path to a folder on disk. + * @param zipPath Path to a folder in the archive. Default: `""`. 
+ * @param filter RegExp or Function if files match will be included. + */ + addLocalFolder(localPath: string, zipPath?: string, filter?: RegExp | ((filename: string) => boolean)): void; + /** + * Asynchronous addLocalFile + * @param localPath + * @param callback + * @param zipPath optional path inside zip + * @param filter optional RegExp or Function if files match will + * be included. + */ + addLocalFolderAsync( + localPath: string, + callback: (success?: boolean, err?: string) => void, + zipPath?: string, + filter?: RegExp | ((filename: string) => boolean) + ): void; + /** + * + * @param localPath - path where files will be extracted + * @param props - optional properties + * @param props.zipPath - optional path inside zip + * @param props.filter - RegExp or Function if files match will be included. + */ + addLocalFolderPromise( + localPath: string, + props: { zipPath?: string, filter?: RegExp | ((filename: string) => boolean) } + ): Promise; + /** + * Allows you to create a entry (file or directory) in the zip file. + * If you want to create a directory the `entryName` must end in `"/"` and a `null` + * buffer should be provided. + * @param entryName Entry path. + * @param content Content to add to the entry; must be a 0-length buffer + * for a directory. + * @param comment Comment to add to the entry. + * @param attr Attribute to add to the entry. + */ + addFile(entryName: string, content: Buffer, comment?: string, attr?: number): void; + /** + * Returns an array of `IZipEntry` objects representing the files and folders + * inside the archive. + */ + getEntries(): AdmZip.IZipEntry[]; + /** + * Returns a `IZipEntry` object representing the file or folder specified by `name`. + * @param name Name of the file or folder to retrieve. + * @return The entry corresponding to the `name`. + */ + getEntry(name: string): AdmZip.IZipEntry | null; + /** + * Returns the number of entries in the ZIP + * @return The amount of entries in the ZIP + */ + getEntryCount(): number; + /** + * Loop through each entry in the ZIP + * @param callback The callback that receives each individual entry + */ + forEach(callback: (entry: AdmZip.IZipEntry) => void): void; + /** + * Extracts the given entry to the given `targetPath`. + * If the entry is a directory inside the archive, the entire directory and + * its subdirectories will be extracted. + * @param entry The full path of the entry or a `IZipEntry` object. + * @param targetPath Target folder where to write the file. + * @param maintainEntryPath If maintainEntryPath is `true` and the entry is + * inside a folder, the entry folder will be created in `targetPath` as + * well. Default: `true`. + * @param overwrite If the file already exists at the target path, the file + * will be overwriten if this is `true`. Default: `false`. + * @param keepOriginalPermission The file will be set as the permission from + * the entry if this is true. Default: `false`. + * @param outFileName String If set will override the filename of the + * extracted file (Only works if the entry is a file) + * @return Boolean + */ + extractEntryTo( + entryPath: string | AdmZip.IZipEntry, + targetPath: string, + maintainEntryPath?: boolean, + overwrite?: boolean, + keepOriginalPermission?: boolean, + outFileName?: string, + ): boolean; + /** + * Test the archive + * @param password The password for the archive + */ + test(password?: string | Buffer): boolean; + /** + * Extracts the entire archive to the given location. + * @param targetPath Target location. 
+ * @param overwrite If the file already exists at the target path, the file + * will be overwriten if this is `true`. Default: `false`. + * @param keepOriginalPermission The file will be set as the permission from + * the entry if this is true. Default: `false`. + * @param password The password for the archive + */ + extractAllTo( + targetPath: string, + overwrite?: boolean, + keepOriginalPermission?: boolean, + password?: string | Buffer + ): void; + /** + * Extracts the entire archive to the given location. + * @param targetPath Target location. + * @param overwrite If the file already exists at the target path, the file + * will be overwriten if this is `true`. Default: `false`. + * @param keepOriginalPermission The file will be set as the permission from + * the entry if this is true. Default: `false`. + * @param callback The callback function will be called after extraction. + */ + extractAllToAsync( + targetPath: string, + overwrite?: boolean, + keepOriginalPermission?: boolean, + callback?: (error?: Error) => void, + ): void; + /** + * Writes the newly created zip file to disk at the specified location or + * if a zip was opened and no `targetFileName` is provided, it will + * overwrite the opened zip. + */ + writeZip(targetFileName?: string, callback?: (error: Error | null) => void): void; + /** + * Writes the newly created zip file to disk at the specified location or + * if a zip was opened and no `targetFileName` is provided, it will + * overwrite the opened zip. + */ + writeZipPromise( + targetFileName?: string, + props?: { overwrite?: boolean, perm?: number } + ): Promise; + /** + * Returns the content of the entire zip file. + */ + toBuffer(): Buffer; + /** + * Asynchronously returns the content of the entire zip file. + * @param onSuccess called with the content of the zip file, once it has been generated. + * @param onFail unused. + * @param onItemStart called before an entry is compressed. + * @param onItemEnd called after an entry is compressed. + */ + toBuffer( + onSuccess: (buffer: Buffer) => void, + onFail?: (...args: any[]) => void, + onItemStart?: (name: string) => void, + onItemEnd?: (name: string) => void, + ): void; + /** + * Asynchronously convert the promise to a Buffer + */ + toBufferPromise(): Promise; +} + +declare namespace AdmZip { + /** + * The `IZipEntry` is more than a structure representing the entry inside the + * zip file. Beside the normal attributes and headers a entry can have, the + * class contains a reference to the part of the file where the compressed + * data resides and decompresses it when requested. It also compresses the + * data and creates the headers required to write in the zip file. + */ + // disable warning about the I-prefix in interface name to prevent breaking stuff for users without a major bump + // tslint:disable-next-line:interface-name + interface IZipEntry { + /** + * Represents the full name and path of the file + */ + entryName: string; + readonly rawEntryName: Buffer; + /** + * Extra data associated with this entry. + */ + extra: Buffer; + /** + * Entry comment. + */ + comment: string; + readonly name: string; + /** + * Read-Only property that indicates the type of the entry. + */ + readonly isDirectory: boolean; + /** + * Get the header associated with this ZipEntry. + */ + readonly header: EntryHeader; + attr: number; + /** + * Retrieve the compressed data for this entry. Note that this may trigger + * compression if any properties were modified. 
+ */ + getCompressedData(): Buffer; + /** + * Asynchronously retrieve the compressed data for this entry. Note that + * this may trigger compression if any properties were modified. + */ + getCompressedDataAsync(callback: (data: Buffer) => void): void; + /** + * Set the (uncompressed) data to be associated with this entry. + */ + setData(value: string | Buffer): void; + /** + * Get the decompressed data associated with this entry. + */ + getData(): Buffer; + /** + * Asynchronously get the decompressed data associated with this entry. + */ + getDataAsync(callback: (data: Buffer, err: string) => void): void; + /** + * Returns the CEN Entry Header to be written to the output zip file, plus + * the extra data and the entry comment. + */ + packHeader(): Buffer; + /** + * Returns a nicely formatted string with the most important properties of + * the ZipEntry. + */ + toString(): string; + } + + interface EntryHeader { + made: number; + version: number; + flags: number; + method: number; + time: Date; + crc: number; + compressedSize: number; + size: number; + fileNameLength: number; + extraLength: number; + commentLength: number; + diskNumStart: number; + inAttr: number; + attr: number; + offset: number; + readonly encripted: boolean; + readonly entryHeaderSize: number; + readonly realDataOffset: number; + readonly dataHeader: DataHeader; + loadDataHeaderFromBinary(data: Buffer): void; + loadFromBinary(data: Buffer): void; + dataHeaderToBinary(): Buffer; + entryHeaderToBinary(): Buffer; + toString(): string; + } + + interface DataHeader { + version: number; + flags: number; + method: number; + time: number; + crc: number; + compressedSize: number; + size: number; + fnameLen: number; + extraLen: number; + } + + interface InitOptions { + /* If true it disables files sorting */ + noSort: boolean; + /* Read entries during load (initial loading may be slower) */ + readEntries: boolean; + /* Read method */ + method: typeof Constants[keyof typeof Constants] | number; + /* file system */ + fs: null | typeof FS; + } +} + +export = AdmZip; diff --git a/node_modules/@types/adm-zip/package.json b/node_modules/@types/adm-zip/package.json new file mode 100755 index 000000000..b465979fb --- /dev/null +++ b/node_modules/@types/adm-zip/package.json @@ -0,0 +1,47 @@ +{ + "name": "@types/adm-zip", + "version": "0.5.0", + "description": "TypeScript definitions for adm-zip", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/adm-zip", + "license": "MIT", + "contributors": [ + { + "name": "John Vilk", + "url": "https://github.com/jvilk", + "githubUsername": "jvilk" + }, + { + "name": "Abner Oliveira", + "url": "https://github.com/abner", + "githubUsername": "abner" + }, + { + "name": "BendingBender", + "url": "https://github.com/BendingBender", + "githubUsername": "BendingBender" + }, + { + "name": "Matthew Sainsbury", + "url": "https://github.com/mattsains", + "githubUsername": "mattsains" + }, + { + "name": "Lei Nelissen", + "url": "https://github.com/LeiNelissen", + "githubUsername": "LeiNelissen" + } + ], + "main": "", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/adm-zip" + }, + "scripts": {}, + "dependencies": { + "@types/node": "*" + }, + "typesPublisherContentHash": "e7f9407982926b4743453eb7e13bc93b66f3e599200f0d101fe305016af41e50", + "typeScriptVersion": "3.9" +} \ No newline at end of file diff --git a/node_modules/@types/adm-zip/util.d.ts 
b/node_modules/@types/adm-zip/util.d.ts new file mode 100755 index 000000000..dacae5fce --- /dev/null +++ b/node_modules/@types/adm-zip/util.d.ts @@ -0,0 +1,142 @@ +export const Constants: { + /* The local file header */ + LOCHDR: 30; // LOC header size + LOCSIG: 0x04034b50; // "PK\003\004" + LOCVER: 4; // version needed to extract + LOCFLG: 6; // general purpose bit flag + LOCHOW: 8; // compression method + LOCTIM: 10; // modification time (2 bytes time, 2 bytes date) + LOCCRC: 14; // uncompressed file crc-32 value + LOCSIZ: 18; // compressed size + LOCLEN: 22; // uncompressed size + LOCNAM: 26; // filename length + LOCEXT: 28; // extra field length + + /* The Data descriptor */ + EXTSIG: 0x08074b50; // "PK\007\008" + EXTHDR: 16; // EXT header size + EXTCRC: 4; // uncompressed file crc-32 value + EXTSIZ: 8; // compressed size + EXTLEN: 12; // uncompressed size + + /* The central directory file header */ + CENHDR: 46; // CEN header size + CENSIG: 0x02014b50; // "PK\001\002" + CENVEM: 4; // version made by + CENVER: 6; // version needed to extract + CENFLG: 8; // encrypt, decrypt flags + CENHOW: 10; // compression method + CENTIM: 12; // modification time (2 bytes time, 2 bytes date) + CENCRC: 16; // uncompressed file crc-32 value + CENSIZ: 20; // compressed size + CENLEN: 24; // uncompressed size + CENNAM: 28; // filename length + CENEXT: 30; // extra field length + CENCOM: 32; // file comment length + CENDSK: 34; // volume number start + CENATT: 36; // internal file attributes + CENATX: 38; // external file attributes (host system dependent) + CENOFF: 42; // LOC header offset + + /* The entries in the end of central directory */ + ENDHDR: 22; // END header size + ENDSIG: 0x06054b50; // "PK\005\006" + ENDSUB: 8; // number of entries on this disk + ENDTOT: 10; // total number of entries + ENDSIZ: 12; // central directory size in bytes + ENDOFF: 16; // offset of first CEN header + ENDCOM: 20; // zip file comment length + + END64HDR: 20; // zip64 END header size + END64SIG: 0x07064b50; // zip64 Locator signature, "PK\006\007" + END64START: 4; // number of the disk with the start of the zip64 + END64OFF: 8; // relative offset of the zip64 end of central directory + END64NUMDISKS: 16; // total number of disks + + ZIP64SIG: 0x06064b50; // zip64 signature, "PK\006\006" + ZIP64HDR: 56; // zip64 record minimum size + ZIP64LEAD: 12; // leading bytes at the start of the record, not counted by the value stored in ZIP64SIZE + ZIP64SIZE: 4; // zip64 size of the central directory record + ZIP64VEM: 12; // zip64 version made by + ZIP64VER: 14; // zip64 version needed to extract + ZIP64DSK: 16; // zip64 number of this disk + ZIP64DSKDIR: 20; // number of the disk with the start of the record directory + ZIP64SUB: 24; // number of entries on this disk + ZIP64TOT: 32; // total number of entries + ZIP64SIZB: 40; // zip64 central directory size in bytes + ZIP64OFF: 48; // offset of start of central directory with respect to the starting disk number + ZIP64EXTRA: 56; // extensible data sector + + /* Compression methods */ + STORED: 0; // no compression + SHRUNK: 1; // shrunk + REDUCED1: 2; // reduced with compression factor 1 + REDUCED2: 3; // reduced with compression factor 2 + REDUCED3: 4; // reduced with compression factor 3 + REDUCED4: 5; // reduced with compression factor 4 + IMPLODED: 6; // imploded + // 7 reserved for Tokenizing compression algorithm + DEFLATED: 8; // deflated + ENHANCED_DEFLATED: 9; // enhanced deflated + PKWARE: 10; // PKWare DCL imploded + // 11 reserved by PKWARE + BZIP2: 12; // 
compressed using BZIP2 + // 13 reserved by PKWARE + LZMA: 14; // LZMA + // 15-17 reserved by PKWARE + IBM_TERSE: 18; // compressed using IBM TERSE + IBM_LZ77: 19; // IBM LZ77 z + AES_ENCRYPT: 99; // WinZIP AES encryption method + + /* General purpose bit flag */ + // values can obtained with expression 2**bitnr + FLG_ENC: 1; // Bit 0: encrypted file + FLG_COMP1: 2; // Bit 1, compression option + FLG_COMP2: 4; // Bit 2, compression option + FLG_DESC: 8; // Bit 3, data descriptor + FLG_ENH: 16; // Bit 4, enhanced deflating + FLG_PATCH: 32; // Bit 5, indicates that the file is compressed patched data. + FLG_STR: 64; // Bit 6, strong encryption (patented) + // Bits 7-10: Currently unused. + FLG_EFS: 2048; // Bit 11: Language encoding flag (EFS) + // Bit 12: Reserved by PKWARE for enhanced compression. + // Bit 13: encrypted the Central Directory (patented). + // Bits 14-15: Reserved by PKWARE. + FLG_MSK: 4096; // mask header values + + /* Load type */ + FILE: 2; + BUFFER: 1; + NONE: 0; + + /* 4.5 Extensible data fields */ + EF_ID: 0; + EF_SIZE: 2; + + /* Header IDs */ + ID_ZIP64: 0x0001; + ID_AVINFO: 0x0007; + ID_PFS: 0x0008; + ID_OS2: 0x0009; + ID_NTFS: 0x000a; + ID_OPENVMS: 0x000c; + ID_UNIX: 0x000d; + ID_FORK: 0x000e; + ID_PATCH: 0x000f; + ID_X509_PKCS7: 0x0014; + ID_X509_CERTID_F: 0x0015; + ID_X509_CERTID_C: 0x0016; + ID_STRONGENC: 0x0017; + ID_RECORD_MGT: 0x0018; + ID_X509_PKCS7_RL: 0x0019; + ID_IBM1: 0x0065; + ID_IBM2: 0x0066; + ID_POSZIP: 0x4690; + + EF_ZIP64_OR_32: 0xffffffff; + EF_ZIP64_OR_16: 0xffff; + EF_ZIP64_SUNCOMP: 0; + EF_ZIP64_SCOMP: 8; + EF_ZIP64_RHO: 16; + EF_ZIP64_DSN: 24; +}; diff --git a/node_modules/adm-zip/LICENSE b/node_modules/adm-zip/LICENSE new file mode 100644 index 000000000..3f2f732ab --- /dev/null +++ b/node_modules/adm-zip/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2012 Another-D-Mention Software and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/adm-zip/README.md b/node_modules/adm-zip/README.md new file mode 100644 index 000000000..60d7a12b7 --- /dev/null +++ b/node_modules/adm-zip/README.md @@ -0,0 +1,65 @@ +# ADM-ZIP for NodeJS with added support for electron original-fs + +ADM-ZIP is a pure JavaScript implementation for zip data compression for [NodeJS](https://nodejs.org/). + +# Installation + +With [npm](https://www.npmjs.com/) do: + + $ npm install adm-zip + +## What is it good for? 
+ +The library allows you to: + +- decompress zip files directly to disk or in memory buffers +- compress files and store them to disk in .zip format or in compressed buffers +- update content of/add new/delete files from an existing .zip + +# Dependencies + +There are no other nodeJS libraries that ADM-ZIP is dependent of + +# Examples + +## Basic usage + +```javascript +var AdmZip = require("adm-zip"); + +// reading archives +var zip = new AdmZip("./my_file.zip"); +var zipEntries = zip.getEntries(); // an array of ZipEntry records + +zipEntries.forEach(function (zipEntry) { + console.log(zipEntry.toString()); // outputs zip entries information + if (zipEntry.entryName == "my_file.txt") { + console.log(zipEntry.getData().toString("utf8")); + } +}); +// outputs the content of some_folder/my_file.txt +console.log(zip.readAsText("some_folder/my_file.txt")); +// extracts the specified file to the specified location +zip.extractEntryTo(/*entry name*/ "some_folder/my_file.txt", /*target path*/ "/home/me/tempfolder", /*maintainEntryPath*/ false, /*overwrite*/ true); +// extracts everything +zip.extractAllTo(/*target path*/ "/home/me/zipcontent/", /*overwrite*/ true); + +// creating archives +var zip = new AdmZip(); + +// add file directly +var content = "inner content of the file"; +zip.addFile("test.txt", Buffer.from(content, "utf8"), "entry comment goes here"); +// add local file +zip.addLocalFile("/home/me/some_picture.png"); +// get everything as a buffer +var willSendthis = zip.toBuffer(); +// or write everything to disk +zip.writeZip(/*target file name*/ "/home/me/files.zip"); + +// ... more examples in the wiki +``` + +For more detailed information please check out the [wiki](https://github.com/cthackers/adm-zip/wiki). + +[![Build Status](https://travis-ci.org/cthackers/adm-zip.svg?branch=master)](https://travis-ci.org/cthackers/adm-zip) diff --git a/node_modules/adm-zip/adm-zip.js b/node_modules/adm-zip/adm-zip.js new file mode 100644 index 000000000..a4dfca600 --- /dev/null +++ b/node_modules/adm-zip/adm-zip.js @@ -0,0 +1,783 @@ +const Utils = require("./util"); +const pth = require("path"); +const ZipEntry = require("./zipEntry"); +const ZipFile = require("./zipFile"); + +const get_Bool = (val, def) => (typeof val === "boolean" ? val : def); +const get_Str = (val, def) => (typeof val === "string" ? val : def); + +const defaultOptions = { + // option "noSort" : if true it disables files sorting + noSort: false, + // read entries during load (initial loading may be slower) + readEntries: false, + // default method is none + method: Utils.Constants.NONE, + // file system + fs: null +}; + +module.exports = function (/**String*/ input, /** object */ options) { + let inBuffer = null; + + // create object based default options, allowing them to be overwritten + const opts = Object.assign(Object.create(null), defaultOptions); + + // test input variable + if (input && "object" === typeof input) { + // if value is not buffer we accept it to be object with options + if (!(input instanceof Uint8Array)) { + Object.assign(opts, input); + input = opts.input ? 
opts.input : undefined; + if (opts.input) delete opts.input; + } + + // if input is buffer + if (Buffer.isBuffer(input)) { + inBuffer = input; + opts.method = Utils.Constants.BUFFER; + input = undefined; + } + } + + // assign options + Object.assign(opts, options); + + // instanciate utils filesystem + const filetools = new Utils(opts); + + // if input is file name we retrieve its content + if (input && "string" === typeof input) { + // load zip file + if (filetools.fs.existsSync(input)) { + opts.method = Utils.Constants.FILE; + opts.filename = input; + inBuffer = filetools.fs.readFileSync(input); + } else { + throw new Error(Utils.Errors.INVALID_FILENAME); + } + } + + // create variable + const _zip = new ZipFile(inBuffer, opts); + + const { canonical, sanitize } = Utils; + + function getEntry(/**Object*/ entry) { + if (entry && _zip) { + var item; + // If entry was given as a file name + if (typeof entry === "string") item = _zip.getEntry(entry); + // if entry was given as a ZipEntry object + if (typeof entry === "object" && typeof entry.entryName !== "undefined" && typeof entry.header !== "undefined") item = _zip.getEntry(entry.entryName); + + if (item) { + return item; + } + } + return null; + } + + function fixPath(zipPath) { + const { join, normalize, sep } = pth.posix; + // convert windows file separators and normalize + return join(".", normalize(sep + zipPath.split("\\").join(sep) + sep)); + } + + return { + /** + * Extracts the given entry from the archive and returns the content as a Buffer object + * @param entry ZipEntry object or String with the full path of the entry + * + * @return Buffer or Null in case of error + */ + readFile: function (/**Object*/ entry, /*String, Buffer*/ pass) { + var item = getEntry(entry); + return (item && item.getData(pass)) || null; + }, + + /** + * Asynchronous readFile + * @param entry ZipEntry object or String with the full path of the entry + * @param callback + * + * @return Buffer or Null in case of error + */ + readFileAsync: function (/**Object*/ entry, /**Function*/ callback) { + var item = getEntry(entry); + if (item) { + item.getDataAsync(callback); + } else { + callback(null, "getEntry failed for:" + entry); + } + }, + + /** + * Extracts the given entry from the archive and returns the content as plain text in the given encoding + * @param entry ZipEntry object or String with the full path of the entry + * @param encoding Optional. If no encoding is specified utf8 is used + * + * @return String + */ + readAsText: function (/**Object*/ entry, /**String=*/ encoding) { + var item = getEntry(entry); + if (item) { + var data = item.getData(); + if (data && data.length) { + return data.toString(encoding || "utf8"); + } + } + return ""; + }, + + /** + * Asynchronous readAsText + * @param entry ZipEntry object or String with the full path of the entry + * @param callback + * @param encoding Optional. 
If no encoding is specified utf8 is used + * + * @return String + */ + readAsTextAsync: function (/**Object*/ entry, /**Function*/ callback, /**String=*/ encoding) { + var item = getEntry(entry); + if (item) { + item.getDataAsync(function (data, err) { + if (err) { + callback(data, err); + return; + } + + if (data && data.length) { + callback(data.toString(encoding || "utf8")); + } else { + callback(""); + } + }); + } else { + callback(""); + } + }, + + /** + * Remove the entry from the file or the entry and all it's nested directories and files if the given entry is a directory + * + * @param entry + */ + deleteFile: function (/**Object*/ entry) { + // @TODO: test deleteFile + var item = getEntry(entry); + if (item) { + _zip.deleteEntry(item.entryName); + } + }, + + /** + * Adds a comment to the zip. The zip must be rewritten after adding the comment. + * + * @param comment + */ + addZipComment: function (/**String*/ comment) { + // @TODO: test addZipComment + _zip.comment = comment; + }, + + /** + * Returns the zip comment + * + * @return String + */ + getZipComment: function () { + return _zip.comment || ""; + }, + + /** + * Adds a comment to a specified zipEntry. The zip must be rewritten after adding the comment + * The comment cannot exceed 65535 characters in length + * + * @param entry + * @param comment + */ + addZipEntryComment: function (/**Object*/ entry, /**String*/ comment) { + var item = getEntry(entry); + if (item) { + item.comment = comment; + } + }, + + /** + * Returns the comment of the specified entry + * + * @param entry + * @return String + */ + getZipEntryComment: function (/**Object*/ entry) { + var item = getEntry(entry); + if (item) { + return item.comment || ""; + } + return ""; + }, + + /** + * Updates the content of an existing entry inside the archive. The zip must be rewritten after updating the content + * + * @param entry + * @param content + */ + updateFile: function (/**Object*/ entry, /**Buffer*/ content) { + var item = getEntry(entry); + if (item) { + item.setData(content); + } + }, + + /** + * Adds a file from the disk to the archive + * + * @param localPath File to add to zip + * @param zipPath Optional path inside the zip + * @param zipName Optional name for the file + */ + addLocalFile: function (/**String*/ localPath, /**String=*/ zipPath, /**String=*/ zipName, /**String*/ comment) { + if (filetools.fs.existsSync(localPath)) { + // fix ZipPath + zipPath = zipPath ? fixPath(zipPath) : ""; + + // p - local file name + var p = localPath.split("\\").join("/").split("/").pop(); + + // add file name into zippath + zipPath += zipName ? zipName : p; + + // read file attributes + const _attr = filetools.fs.statSync(localPath); + + // add file into zip file + this.addFile(zipPath, filetools.fs.readFileSync(localPath), comment, _attr); + } else { + throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath)); + } + }, + + /** + * Adds a local directory and all its nested files and directories to the archive + * + * @param localPath + * @param zipPath optional path inside zip + * @param filter optional RegExp or Function if files match will + * be included. 
+ */ + addLocalFolder: function (/**String*/ localPath, /**String=*/ zipPath, /**=RegExp|Function*/ filter) { + // Prepare filter + if (filter instanceof RegExp) { + // if filter is RegExp wrap it + filter = (function (rx) { + return function (filename) { + return rx.test(filename); + }; + })(filter); + } else if ("function" !== typeof filter) { + // if filter is not function we will replace it + filter = function () { + return true; + }; + } + + // fix ZipPath + zipPath = zipPath ? fixPath(zipPath) : ""; + + // normalize the path first + localPath = pth.normalize(localPath); + + if (filetools.fs.existsSync(localPath)) { + const items = filetools.findFiles(localPath); + const self = this; + + if (items.length) { + items.forEach(function (filepath) { + var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix + if (filter(p)) { + var stats = filetools.fs.statSync(filepath); + if (stats.isFile()) { + self.addFile(zipPath + p, filetools.fs.readFileSync(filepath), "", stats); + } else { + self.addFile(zipPath + p + "/", Buffer.alloc(0), "", stats); + } + } + }); + } + } else { + throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath)); + } + }, + + /** + * Asynchronous addLocalFile + * @param localPath + * @param callback + * @param zipPath optional path inside zip + * @param filter optional RegExp or Function if files match will + * be included. + */ + addLocalFolderAsync: function (/*String*/ localPath, /*Function*/ callback, /*String*/ zipPath, /*RegExp|Function*/ filter) { + if (filter instanceof RegExp) { + filter = (function (rx) { + return function (filename) { + return rx.test(filename); + }; + })(filter); + } else if ("function" !== typeof filter) { + filter = function () { + return true; + }; + } + + // fix ZipPath + zipPath = zipPath ? fixPath(zipPath) : ""; + + // normalize the path first + localPath = pth.normalize(localPath); + + var self = this; + filetools.fs.open(localPath, "r", function (err) { + if (err && err.code === "ENOENT") { + callback(undefined, Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath)); + } else if (err) { + callback(undefined, err); + } else { + var items = filetools.findFiles(localPath); + var i = -1; + + var next = function () { + i += 1; + if (i < items.length) { + var filepath = items[i]; + var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix + p = p + .normalize("NFD") + .replace(/[\u0300-\u036f]/g, "") + .replace(/[^\x20-\x7E]/g, ""); // accent fix + if (filter(p)) { + filetools.fs.stat(filepath, function (er0, stats) { + if (er0) callback(undefined, er0); + if (stats.isFile()) { + filetools.fs.readFile(filepath, function (er1, data) { + if (er1) { + callback(undefined, er1); + } else { + self.addFile(zipPath + p, data, "", stats); + next(); + } + }); + } else { + self.addFile(zipPath + p + "/", Buffer.alloc(0), "", stats); + next(); + } + }); + } else { + next(); + } + } else { + callback(true, undefined); + } + }; + + next(); + } + }); + }, + + /** + * + * @param {string} localPath - path where files will be extracted + * @param {object} props - optional properties + * @param {string} props.zipPath - optional path inside zip + * @param {regexp, function} props.filter - RegExp or Function if files match will be included. 
+ */ + addLocalFolderPromise: function (/*String*/ localPath, /* object */ props) { + return new Promise((resolve, reject) => { + const { filter, zipPath } = Object.assign({}, props); + this.addLocalFolderAsync( + localPath, + (done, err) => { + if (err) reject(err); + if (done) resolve(this); + }, + zipPath, + filter + ); + }); + }, + + /** + * Allows you to create a entry (file or directory) in the zip file. + * If you want to create a directory the entryName must end in / and a null buffer should be provided. + * Comment and attributes are optional + * + * @param {string} entryName + * @param {Buffer | string} content - file content as buffer or utf8 coded string + * @param {string} comment - file comment + * @param {number | object} attr - number as unix file permissions, object as filesystem Stats object + */ + addFile: function (/**String*/ entryName, /**Buffer*/ content, /**String*/ comment, /**Number*/ attr) { + let entry = getEntry(entryName); + const update = entry != null; + + // prepare new entry + if (!update) { + entry = new ZipEntry(); + entry.entryName = entryName; + } + entry.comment = comment || ""; + + const isStat = "object" === typeof attr && attr instanceof filetools.fs.Stats; + + // last modification time from file stats + if (isStat) { + entry.header.time = attr.mtime; + } + + // Set file attribute + var fileattr = entry.isDirectory ? 0x10 : 0; // (MS-DOS directory flag) + + // extended attributes field for Unix + if (!Utils.isWin) { + // set file type either S_IFDIR / S_IFREG + let unix = entry.isDirectory ? 0x4000 : 0x8000; + + if (isStat) { + // File attributes from file stats + unix |= 0xfff & attr.mode; + } else if ("number" === typeof attr) { + // attr from given attr values + unix |= 0xfff & attr; + } else { + // Default values: + unix |= entry.isDirectory ? 0o755 : 0o644; // permissions (drwxr-xr-x) or (-r-wr--r--) + } + + fileattr = (fileattr | (unix << 16)) >>> 0; // add attributes + } + + entry.attr = fileattr; + + entry.setData(content); + if (!update) _zip.setEntry(entry); + }, + + /** + * Returns an array of ZipEntry objects representing the files and folders inside the archive + * + * @return Array + */ + getEntries: function () { + return _zip ? _zip.entries : []; + }, + + /** + * Returns a ZipEntry object representing the file or folder specified by ``name``. + * + * @param name + * @return ZipEntry + */ + getEntry: function (/**String*/ name) { + return getEntry(name); + }, + + getEntryCount: function () { + return _zip.getEntryCount(); + }, + + forEach: function (callback) { + return _zip.forEach(callback); + }, + + /** + * Extracts the given entry to the given targetPath + * If the entry is a directory inside the archive, the entire directory and it's subdirectories will be extracted + * + * @param entry ZipEntry object or String with the full path of the entry + * @param targetPath Target folder where to write the file + * @param maintainEntryPath If maintainEntryPath is true and the entry is inside a folder, the entry folder + * will be created in targetPath as well. Default is TRUE + * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. + * Default is FALSE + * @param keepOriginalPermission The file will be set as the permission from the entry if this is true. 
+ * Default is FALSE + * @param outFileName String If set will override the filename of the extracted file (Only works if the entry is a file) + * + * @return Boolean + */ + extractEntryTo: function ( + /**Object*/ entry, + /**String*/ targetPath, + /**Boolean*/ maintainEntryPath, + /**Boolean*/ overwrite, + /**Boolean*/ keepOriginalPermission, + /**String**/ outFileName + ) { + overwrite = get_Bool(overwrite, false); + keepOriginalPermission = get_Bool(keepOriginalPermission, false); + maintainEntryPath = get_Bool(maintainEntryPath, true); + outFileName = get_Str(outFileName, get_Str(keepOriginalPermission, undefined)); + + var item = getEntry(entry); + if (!item) { + throw new Error(Utils.Errors.NO_ENTRY); + } + + var entryName = canonical(item.entryName); + + var target = sanitize(targetPath, outFileName && !item.isDirectory ? outFileName : maintainEntryPath ? entryName : pth.basename(entryName)); + + if (item.isDirectory) { + var children = _zip.getEntryChildren(item); + children.forEach(function (child) { + if (child.isDirectory) return; + var content = child.getData(); + if (!content) { + throw new Error(Utils.Errors.CANT_EXTRACT_FILE); + } + var name = canonical(child.entryName); + var childName = sanitize(targetPath, maintainEntryPath ? name : pth.basename(name)); + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? child.header.fileAttr : undefined; + filetools.writeFileTo(childName, content, overwrite, fileAttr); + }); + return true; + } + + var content = item.getData(); + if (!content) throw new Error(Utils.Errors.CANT_EXTRACT_FILE); + + if (filetools.fs.existsSync(target) && !overwrite) { + throw new Error(Utils.Errors.CANT_OVERRIDE); + } + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined; + filetools.writeFileTo(target, content, overwrite, fileAttr); + + return true; + }, + + /** + * Test the archive + * + */ + test: function (pass) { + if (!_zip) { + return false; + } + + for (var entry in _zip.entries) { + try { + if (entry.isDirectory) { + continue; + } + var content = _zip.entries[entry].getData(pass); + if (!content) { + return false; + } + } catch (err) { + return false; + } + } + return true; + }, + + /** + * Extracts the entire archive to the given location + * + * @param targetPath Target location + * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. + * Default is FALSE + * @param keepOriginalPermission The file will be set as the permission from the entry if this is true. + * Default is FALSE + */ + extractAllTo: function (/**String*/ targetPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /*String, Buffer*/ pass) { + overwrite = get_Bool(overwrite, false); + pass = get_Str(keepOriginalPermission, pass); + keepOriginalPermission = get_Bool(keepOriginalPermission, false); + if (!_zip) { + throw new Error(Utils.Errors.NO_ZIP); + } + _zip.entries.forEach(function (entry) { + var entryName = sanitize(targetPath, canonical(entry.entryName.toString())); + if (entry.isDirectory) { + filetools.makeDir(entryName); + return; + } + var content = entry.getData(pass); + if (!content) { + throw new Error(Utils.Errors.CANT_EXTRACT_FILE); + } + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? 
entry.header.fileAttr : undefined; + filetools.writeFileTo(entryName, content, overwrite, fileAttr); + try { + filetools.fs.utimesSync(entryName, entry.header.time, entry.header.time); + } catch (err) { + throw new Error(Utils.Errors.CANT_EXTRACT_FILE); + } + }); + }, + + /** + * Asynchronous extractAllTo + * + * @param targetPath Target location + * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. + * Default is FALSE + * @param keepOriginalPermission The file will be set as the permission from the entry if this is true. + * Default is FALSE + * @param callback The callback will be executed when all entries are extracted successfully or any error is thrown. + */ + extractAllToAsync: function (/**String*/ targetPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /**Function*/ callback) { + if (!callback) { + callback = function () {}; + } + overwrite = get_Bool(overwrite, false); + if (typeof keepOriginalPermission === "function" && !callback) callback = keepOriginalPermission; + keepOriginalPermission = get_Bool(keepOriginalPermission, false); + if (!_zip) { + callback(new Error(Utils.Errors.NO_ZIP)); + return; + } + + targetPath = pth.resolve(targetPath); + // convert entryName to + const getPath = (entry) => sanitize(targetPath, pth.normalize(canonical(entry.entryName.toString()))); + const getError = (msg, file) => new Error(msg + ': "' + file + '"'); + + // separate directories from files + const dirEntries = []; + const fileEntries = new Set(); + _zip.entries.forEach((e) => { + if (e.isDirectory) { + dirEntries.push(e); + } else { + fileEntries.add(e); + } + }); + + // Create directory entries first synchronously + // this prevents race condition and assures folders are there before writing files + for (const entry of dirEntries) { + const dirPath = getPath(entry); + // The reverse operation for attr depend on method addFile() + const dirAttr = keepOriginalPermission ? entry.header.fileAttr : undefined; + try { + filetools.makeDir(dirPath); + if (dirAttr) filetools.fs.chmodSync(dirPath, dirAttr); + // in unix timestamp will change if files are later added to folder, but still + filetools.fs.utimesSync(dirPath, entry.header.time, entry.header.time); + } catch (er) { + callback(getError("Unable to create folder", dirPath)); + } + } + + // callback wrapper, for some house keeping + const done = () => { + if (fileEntries.size === 0) { + callback(); + } + }; + + // Extract file entries asynchronously + for (const entry of fileEntries.values()) { + const entryName = pth.normalize(canonical(entry.entryName.toString())); + const filePath = sanitize(targetPath, entryName); + entry.getDataAsync(function (content, err_1) { + if (err_1) { + callback(new Error(err_1)); + return; + } + if (!content) { + callback(new Error(Utils.Errors.CANT_EXTRACT_FILE)); + } else { + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? 
entry.header.fileAttr : undefined; + filetools.writeFileToAsync(filePath, content, overwrite, fileAttr, function (succ) { + if (!succ) { + callback(getError("Unable to write file", filePath)); + return; + } + filetools.fs.utimes(filePath, entry.header.time, entry.header.time, function (err_2) { + if (err_2) { + callback(getError("Unable to set times", filePath)); + return; + } + fileEntries.delete(entry); + // call the callback if it was last entry + done(); + }); + }); + } + }); + } + // call the callback if fileEntries was empty + done(); + }, + + /** + * Writes the newly created zip file to disk at the specified location or if a zip was opened and no ``targetFileName`` is provided, it will overwrite the opened zip + * + * @param targetFileName + * @param callback + */ + writeZip: function (/**String*/ targetFileName, /**Function*/ callback) { + if (arguments.length === 1) { + if (typeof targetFileName === "function") { + callback = targetFileName; + targetFileName = ""; + } + } + + if (!targetFileName && opts.filename) { + targetFileName = opts.filename; + } + if (!targetFileName) return; + + var zipData = _zip.compressToBuffer(); + if (zipData) { + var ok = filetools.writeFileTo(targetFileName, zipData, true); + if (typeof callback === "function") callback(!ok ? new Error("failed") : null, ""); + } + }, + + writeZipPromise: function (/**String*/ targetFileName, /* object */ props) { + const { overwrite, perm } = Object.assign({ overwrite: true }, props); + + return new Promise((resolve, reject) => { + // find file name + if (!targetFileName && opts.filename) targetFileName = opts.filename; + if (!targetFileName) reject("ADM-ZIP: ZIP File Name Missing"); + + this.toBufferPromise().then((zipData) => { + const ret = (done) => (done ? resolve(done) : reject("ADM-ZIP: Wasn't able to write zip file")); + filetools.writeFileToAsync(targetFileName, zipData, overwrite, perm, ret); + }, reject); + }); + }, + + toBufferPromise: function () { + return new Promise((resolve, reject) => { + _zip.toAsyncBuffer(resolve, reject); + }); + }, + + /** + * Returns the content of the entire zip file as a Buffer object + * + * @return Buffer + */ + toBuffer: function (/**Function=*/ onSuccess, /**Function=*/ onFail, /**Function=*/ onItemStart, /**Function=*/ onItemEnd) { + this.valueOf = 2; + if (typeof onSuccess === "function") { + _zip.toAsyncBuffer(onSuccess, onFail, onItemStart, onItemEnd); + return null; + } + return _zip.compressToBuffer(); + } + }; +}; diff --git a/node_modules/adm-zip/headers/entryHeader.js b/node_modules/adm-zip/headers/entryHeader.js new file mode 100644 index 000000000..572b9a74a --- /dev/null +++ b/node_modules/adm-zip/headers/entryHeader.js @@ -0,0 +1,338 @@ +var Utils = require("../util"), + Constants = Utils.Constants; + +/* The central directory file header */ +module.exports = function () { + var _verMade = 20, // v2.0 + _version = 10, // v1.0 + _flags = 0, + _method = 0, + _time = 0, + _crc = 0, + _compressedSize = 0, + _size = 0, + _fnameLen = 0, + _extraLen = 0, + _comLen = 0, + _diskStart = 0, + _inattr = 0, + _attr = 0, + _offset = 0; + + _verMade |= Utils.isWin ? 0x0a00 : 0x0300; + + // Set EFS flag since filename and comment fields are all by default encoded using UTF-8. 
+ // Without it file names may be corrupted for other apps when file names use unicode chars + _flags |= Constants.FLG_EFS; + + var _dataHeader = {}; + + function setTime(val) { + val = new Date(val); + _time = + (((val.getFullYear() - 1980) & 0x7f) << 25) | // b09-16 years from 1980 + ((val.getMonth() + 1) << 21) | // b05-08 month + (val.getDate() << 16) | // b00-04 hour + // 2 bytes time + (val.getHours() << 11) | // b11-15 hour + (val.getMinutes() << 5) | // b05-10 minute + (val.getSeconds() >> 1); // b00-04 seconds divided by 2 + } + + setTime(+new Date()); + + return { + get made() { + return _verMade; + }, + set made(val) { + _verMade = val; + }, + + get version() { + return _version; + }, + set version(val) { + _version = val; + }, + + get flags() { + return _flags; + }, + set flags(val) { + _flags = val; + }, + + get method() { + return _method; + }, + set method(val) { + switch (val) { + case Constants.STORED: + this.version = 10; + case Constants.DEFLATED: + default: + this.version = 20; + } + _method = val; + }, + + get time() { + return new Date(((_time >> 25) & 0x7f) + 1980, ((_time >> 21) & 0x0f) - 1, (_time >> 16) & 0x1f, (_time >> 11) & 0x1f, (_time >> 5) & 0x3f, (_time & 0x1f) << 1); + }, + set time(val) { + setTime(val); + }, + + get crc() { + return _crc; + }, + set crc(val) { + _crc = Math.max(0, val) >>> 0; + }, + + get compressedSize() { + return _compressedSize; + }, + set compressedSize(val) { + _compressedSize = Math.max(0, val) >>> 0; + }, + + get size() { + return _size; + }, + set size(val) { + _size = Math.max(0, val) >>> 0; + }, + + get fileNameLength() { + return _fnameLen; + }, + set fileNameLength(val) { + _fnameLen = val; + }, + + get extraLength() { + return _extraLen; + }, + set extraLength(val) { + _extraLen = val; + }, + + get commentLength() { + return _comLen; + }, + set commentLength(val) { + _comLen = val; + }, + + get diskNumStart() { + return _diskStart; + }, + set diskNumStart(val) { + _diskStart = Math.max(0, val) >>> 0; + }, + + get inAttr() { + return _inattr; + }, + set inAttr(val) { + _inattr = Math.max(0, val) >>> 0; + }, + + get attr() { + return _attr; + }, + set attr(val) { + _attr = Math.max(0, val) >>> 0; + }, + + // get Unix file permissions + get fileAttr() { + return _attr ? 
(((_attr >>> 0) | 0) >> 16) & 0xfff : 0; + }, + + get offset() { + return _offset; + }, + set offset(val) { + _offset = Math.max(0, val) >>> 0; + }, + + get encripted() { + return (_flags & 1) === 1; + }, + + get entryHeaderSize() { + return Constants.CENHDR + _fnameLen + _extraLen + _comLen; + }, + + get realDataOffset() { + return _offset + Constants.LOCHDR + _dataHeader.fnameLen + _dataHeader.extraLen; + }, + + get dataHeader() { + return _dataHeader; + }, + + loadDataHeaderFromBinary: function (/*Buffer*/ input) { + var data = input.slice(_offset, _offset + Constants.LOCHDR); + // 30 bytes and should start with "PK\003\004" + if (data.readUInt32LE(0) !== Constants.LOCSIG) { + throw new Error(Utils.Errors.INVALID_LOC); + } + _dataHeader = { + // version needed to extract + version: data.readUInt16LE(Constants.LOCVER), + // general purpose bit flag + flags: data.readUInt16LE(Constants.LOCFLG), + // compression method + method: data.readUInt16LE(Constants.LOCHOW), + // modification time (2 bytes time, 2 bytes date) + time: data.readUInt32LE(Constants.LOCTIM), + // uncompressed file crc-32 value + crc: data.readUInt32LE(Constants.LOCCRC), + // compressed size + compressedSize: data.readUInt32LE(Constants.LOCSIZ), + // uncompressed size + size: data.readUInt32LE(Constants.LOCLEN), + // filename length + fnameLen: data.readUInt16LE(Constants.LOCNAM), + // extra field length + extraLen: data.readUInt16LE(Constants.LOCEXT) + }; + }, + + loadFromBinary: function (/*Buffer*/ data) { + // data should be 46 bytes and start with "PK 01 02" + if (data.length !== Constants.CENHDR || data.readUInt32LE(0) !== Constants.CENSIG) { + throw new Error(Utils.Errors.INVALID_CEN); + } + // version made by + _verMade = data.readUInt16LE(Constants.CENVEM); + // version needed to extract + _version = data.readUInt16LE(Constants.CENVER); + // encrypt, decrypt flags + _flags = data.readUInt16LE(Constants.CENFLG); + // compression method + _method = data.readUInt16LE(Constants.CENHOW); + // modification time (2 bytes time, 2 bytes date) + _time = data.readUInt32LE(Constants.CENTIM); + // uncompressed file crc-32 value + _crc = data.readUInt32LE(Constants.CENCRC); + // compressed size + _compressedSize = data.readUInt32LE(Constants.CENSIZ); + // uncompressed size + _size = data.readUInt32LE(Constants.CENLEN); + // filename length + _fnameLen = data.readUInt16LE(Constants.CENNAM); + // extra field length + _extraLen = data.readUInt16LE(Constants.CENEXT); + // file comment length + _comLen = data.readUInt16LE(Constants.CENCOM); + // volume number start + _diskStart = data.readUInt16LE(Constants.CENDSK); + // internal file attributes + _inattr = data.readUInt16LE(Constants.CENATT); + // external file attributes + _attr = data.readUInt32LE(Constants.CENATX); + // LOC header offset + _offset = data.readUInt32LE(Constants.CENOFF); + }, + + dataHeaderToBinary: function () { + // LOC header size (30 bytes) + var data = Buffer.alloc(Constants.LOCHDR); + // "PK\003\004" + data.writeUInt32LE(Constants.LOCSIG, 0); + // version needed to extract + data.writeUInt16LE(_version, Constants.LOCVER); + // general purpose bit flag + data.writeUInt16LE(_flags, Constants.LOCFLG); + // compression method + data.writeUInt16LE(_method, Constants.LOCHOW); + // modification time (2 bytes time, 2 bytes date) + data.writeUInt32LE(_time, Constants.LOCTIM); + // uncompressed file crc-32 value + data.writeUInt32LE(_crc, Constants.LOCCRC); + // compressed size + data.writeUInt32LE(_compressedSize, Constants.LOCSIZ); + // uncompressed size + 
data.writeUInt32LE(_size, Constants.LOCLEN); + // filename length + data.writeUInt16LE(_fnameLen, Constants.LOCNAM); + // extra field length + data.writeUInt16LE(_extraLen, Constants.LOCEXT); + return data; + }, + + entryHeaderToBinary: function () { + // CEN header size (46 bytes) + var data = Buffer.alloc(Constants.CENHDR + _fnameLen + _extraLen + _comLen); + // "PK\001\002" + data.writeUInt32LE(Constants.CENSIG, 0); + // version made by + data.writeUInt16LE(_verMade, Constants.CENVEM); + // version needed to extract + data.writeUInt16LE(_version, Constants.CENVER); + // encrypt, decrypt flags + data.writeUInt16LE(_flags, Constants.CENFLG); + // compression method + data.writeUInt16LE(_method, Constants.CENHOW); + // modification time (2 bytes time, 2 bytes date) + data.writeUInt32LE(_time, Constants.CENTIM); + // uncompressed file crc-32 value + data.writeUInt32LE(_crc, Constants.CENCRC); + // compressed size + data.writeUInt32LE(_compressedSize, Constants.CENSIZ); + // uncompressed size + data.writeUInt32LE(_size, Constants.CENLEN); + // filename length + data.writeUInt16LE(_fnameLen, Constants.CENNAM); + // extra field length + data.writeUInt16LE(_extraLen, Constants.CENEXT); + // file comment length + data.writeUInt16LE(_comLen, Constants.CENCOM); + // volume number start + data.writeUInt16LE(_diskStart, Constants.CENDSK); + // internal file attributes + data.writeUInt16LE(_inattr, Constants.CENATT); + // external file attributes + data.writeUInt32LE(_attr, Constants.CENATX); + // LOC header offset + data.writeUInt32LE(_offset, Constants.CENOFF); + // fill all with + data.fill(0x00, Constants.CENHDR); + return data; + }, + + toJSON: function () { + const bytes = function (nr) { + return nr + " bytes"; + }; + + return { + made: _verMade, + version: _version, + flags: _flags, + method: Utils.methodToString(_method), + time: this.time, + crc: "0x" + _crc.toString(16).toUpperCase(), + compressedSize: bytes(_compressedSize), + size: bytes(_size), + fileNameLength: bytes(_fnameLen), + extraLength: bytes(_extraLen), + commentLength: bytes(_comLen), + diskNumStart: _diskStart, + inAttr: _inattr, + attr: _attr, + offset: _offset, + entryHeaderSize: bytes(Constants.CENHDR + _fnameLen + _extraLen + _comLen) + }; + }, + + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); + } + }; +}; diff --git a/node_modules/adm-zip/headers/index.js b/node_modules/adm-zip/headers/index.js new file mode 100644 index 000000000..b54a7222f --- /dev/null +++ b/node_modules/adm-zip/headers/index.js @@ -0,0 +1,2 @@ +exports.EntryHeader = require("./entryHeader"); +exports.MainHeader = require("./mainHeader"); diff --git a/node_modules/adm-zip/headers/mainHeader.js b/node_modules/adm-zip/headers/mainHeader.js new file mode 100644 index 000000000..06f098f70 --- /dev/null +++ b/node_modules/adm-zip/headers/mainHeader.js @@ -0,0 +1,129 @@ +var Utils = require("../util"), + Constants = Utils.Constants; + +/* The entries in the end of central directory */ +module.exports = function () { + var _volumeEntries = 0, + _totalEntries = 0, + _size = 0, + _offset = 0, + _commentLength = 0; + + return { + get diskEntries() { + return _volumeEntries; + }, + set diskEntries(/*Number*/ val) { + _volumeEntries = _totalEntries = val; + }, + + get totalEntries() { + return _totalEntries; + }, + set totalEntries(/*Number*/ val) { + _totalEntries = _volumeEntries = val; + }, + + get size() { + return _size; + }, + set size(/*Number*/ val) { + _size = val; + }, + + get offset() { + return _offset; + }, + set 
offset(/*Number*/ val) { + _offset = val; + }, + + get commentLength() { + return _commentLength; + }, + set commentLength(/*Number*/ val) { + _commentLength = val; + }, + + get mainHeaderSize() { + return Constants.ENDHDR + _commentLength; + }, + + loadFromBinary: function (/*Buffer*/ data) { + // data should be 22 bytes and start with "PK 05 06" + // or be 56+ bytes and start with "PK 06 06" for Zip64 + if ( + (data.length !== Constants.ENDHDR || data.readUInt32LE(0) !== Constants.ENDSIG) && + (data.length < Constants.ZIP64HDR || data.readUInt32LE(0) !== Constants.ZIP64SIG) + ) { + throw new Error(Utils.Errors.INVALID_END); + } + + if (data.readUInt32LE(0) === Constants.ENDSIG) { + // number of entries on this volume + _volumeEntries = data.readUInt16LE(Constants.ENDSUB); + // total number of entries + _totalEntries = data.readUInt16LE(Constants.ENDTOT); + // central directory size in bytes + _size = data.readUInt32LE(Constants.ENDSIZ); + // offset of first CEN header + _offset = data.readUInt32LE(Constants.ENDOFF); + // zip file comment length + _commentLength = data.readUInt16LE(Constants.ENDCOM); + } else { + // number of entries on this volume + _volumeEntries = Utils.readBigUInt64LE(data, Constants.ZIP64SUB); + // total number of entries + _totalEntries = Utils.readBigUInt64LE(data, Constants.ZIP64TOT); + // central directory size in bytes + _size = Utils.readBigUInt64LE(data, Constants.ZIP64SIZ); + // offset of first CEN header + _offset = Utils.readBigUInt64LE(data, Constants.ZIP64OFF); + + _commentLength = 0; + } + }, + + toBinary: function () { + var b = Buffer.alloc(Constants.ENDHDR + _commentLength); + // "PK 05 06" signature + b.writeUInt32LE(Constants.ENDSIG, 0); + b.writeUInt32LE(0, 4); + // number of entries on this volume + b.writeUInt16LE(_volumeEntries, Constants.ENDSUB); + // total number of entries + b.writeUInt16LE(_totalEntries, Constants.ENDTOT); + // central directory size in bytes + b.writeUInt32LE(_size, Constants.ENDSIZ); + // offset of first CEN header + b.writeUInt32LE(_offset, Constants.ENDOFF); + // zip file comment length + b.writeUInt16LE(_commentLength, Constants.ENDCOM); + // fill comment memory with spaces so no garbage is left there + b.fill(" ", Constants.ENDHDR); + + return b; + }, + + toJSON: function () { + // creates 0x0000 style output + const offset = function (nr, len) { + let offs = nr.toString(16).toUpperCase(); + while (offs.length < len) offs = "0" + offs; + return "0x" + offs; + }; + + return { + diskEntries: _volumeEntries, + totalEntries: _totalEntries, + size: _size + " bytes", + offset: offset(_offset, 4), + commentLength: _commentLength + }; + }, + + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); + } + }; +}; diff --git a/node_modules/adm-zip/methods/deflater.js b/node_modules/adm-zip/methods/deflater.js new file mode 100644 index 000000000..992de8f10 --- /dev/null +++ b/node_modules/adm-zip/methods/deflater.js @@ -0,0 +1,33 @@ +module.exports = function (/*Buffer*/ inbuf) { + var zlib = require("zlib"); + + var opts = { chunkSize: (parseInt(inbuf.length / 1024) + 1) * 1024 }; + + return { + deflate: function () { + return zlib.deflateRawSync(inbuf, opts); + }, + + deflateAsync: function (/*Function*/ callback) { + var tmp = zlib.createDeflateRaw(opts), + parts = [], + total = 0; + tmp.on("data", function (data) { + parts.push(data); + total += data.length; + }); + tmp.on("end", function () { + var buf = Buffer.alloc(total), + written = 0; + buf.fill(0); + for (var i = 0; i < parts.length; i++) { + var 
part = parts[i]; + part.copy(buf, written); + written += part.length; + } + callback && callback(buf); + }); + tmp.end(inbuf); + } + }; +}; diff --git a/node_modules/adm-zip/methods/index.js b/node_modules/adm-zip/methods/index.js new file mode 100644 index 000000000..5285677fc --- /dev/null +++ b/node_modules/adm-zip/methods/index.js @@ -0,0 +1,3 @@ +exports.Deflater = require("./deflater"); +exports.Inflater = require("./inflater"); +exports.ZipCrypto = require("./zipcrypto"); diff --git a/node_modules/adm-zip/methods/inflater.js b/node_modules/adm-zip/methods/inflater.js new file mode 100644 index 000000000..3ed0d589f --- /dev/null +++ b/node_modules/adm-zip/methods/inflater.js @@ -0,0 +1,31 @@ +module.exports = function (/*Buffer*/ inbuf) { + var zlib = require("zlib"); + + return { + inflate: function () { + return zlib.inflateRawSync(inbuf); + }, + + inflateAsync: function (/*Function*/ callback) { + var tmp = zlib.createInflateRaw(), + parts = [], + total = 0; + tmp.on("data", function (data) { + parts.push(data); + total += data.length; + }); + tmp.on("end", function () { + var buf = Buffer.alloc(total), + written = 0; + buf.fill(0); + for (var i = 0; i < parts.length; i++) { + var part = parts[i]; + part.copy(buf, written); + written += part.length; + } + callback && callback(buf); + }); + tmp.end(inbuf); + } + }; +}; diff --git a/node_modules/adm-zip/methods/zipcrypto.js b/node_modules/adm-zip/methods/zipcrypto.js new file mode 100644 index 000000000..701b5ce44 --- /dev/null +++ b/node_modules/adm-zip/methods/zipcrypto.js @@ -0,0 +1,170 @@ +"use strict"; + +// node crypt, we use it for generate salt +// eslint-disable-next-line node/no-unsupported-features/node-builtins +const { randomFillSync } = require("crypto"); + +// generate CRC32 lookup table +const crctable = new Uint32Array(256).map((t, crc) => { + for (let j = 0; j < 8; j++) { + if (0 !== (crc & 1)) { + crc = (crc >>> 1) ^ 0xedb88320; + } else { + crc >>>= 1; + } + } + return crc >>> 0; +}); + +// C-style uInt32 Multiply (discards higher bits, when JS multiply discards lower bits) +const uMul = (a, b) => Math.imul(a, b) >>> 0; + +// crc32 byte single update (actually same function is part of utils.crc32 function :) ) +const crc32update = (pCrc32, bval) => { + return crctable[(pCrc32 ^ bval) & 0xff] ^ (pCrc32 >>> 8); +}; + +// function for generating salt for encrytion header +const genSalt = () => { + if ("function" === typeof randomFillSync) { + return randomFillSync(Buffer.alloc(12)); + } else { + // fallback if function is not defined + return genSalt.node(); + } +}; + +// salt generation with node random function (mainly as fallback) +genSalt.node = () => { + const salt = Buffer.alloc(12); + const len = salt.length; + for (let i = 0; i < len; i++) salt[i] = (Math.random() * 256) & 0xff; + return salt; +}; + +// general config +const config = { + genSalt +}; + +// Class Initkeys handles same basic ops with keys +function Initkeys(pw) { + const pass = Buffer.isBuffer(pw) ? 
pw : Buffer.from(pw); + this.keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]); + for (let i = 0; i < pass.length; i++) { + this.updateKeys(pass[i]); + } +} + +Initkeys.prototype.updateKeys = function (byteValue) { + const keys = this.keys; + keys[0] = crc32update(keys[0], byteValue); + keys[1] += keys[0] & 0xff; + keys[1] = uMul(keys[1], 134775813) + 1; + keys[2] = crc32update(keys[2], keys[1] >>> 24); + return byteValue; +}; + +Initkeys.prototype.next = function () { + const k = (this.keys[2] | 2) >>> 0; // key + return (uMul(k, k ^ 1) >> 8) & 0xff; // decode +}; + +function make_decrypter(/*Buffer*/ pwd) { + // 1. Stage initialize key + const keys = new Initkeys(pwd); + + // return decrypter function + return function (/*Buffer*/ data) { + // result - we create new Buffer for results + const result = Buffer.alloc(data.length); + let pos = 0; + // process input data + for (let c of data) { + //c ^= keys.next(); + //result[pos++] = c; // decode & Save Value + result[pos++] = keys.updateKeys(c ^ keys.next()); // update keys with decoded byte + } + return result; + }; +} + +function make_encrypter(/*Buffer*/ pwd) { + // 1. Stage initialize key + const keys = new Initkeys(pwd); + + // return encrypting function, result and pos is here so we dont have to merge buffers later + return function (/*Buffer*/ data, /*Buffer*/ result, /* Number */ pos = 0) { + // result - we create new Buffer for results + if (!result) result = Buffer.alloc(data.length); + // process input data + for (let c of data) { + const k = keys.next(); // save key byte + result[pos++] = c ^ k; // save val + keys.updateKeys(c); // update keys with decoded byte + } + return result; + }; +} + +function decrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd) { + if (!data || !Buffer.isBuffer(data) || data.length < 12) { + return Buffer.alloc(0); + } + + // 1. We Initialize and generate decrypting function + const decrypter = make_decrypter(pwd); + + // 2. decrypt salt what is always 12 bytes and is a part of file content + const salt = decrypter(data.slice(0, 12)); + + // 3. does password meet expectations + if (salt[11] !== header.crc >>> 24) { + throw "ADM-ZIP: Wrong Password"; + } + + // 4. decode content + return decrypter(data.slice(12)); +} + +// lets add way to populate salt, NOT RECOMMENDED for production but maybe useful for testing general functionality +function _salter(data) { + if (Buffer.isBuffer(data) && data.length >= 12) { + // be aware - currently salting buffer data is modified + config.genSalt = function () { + return data.slice(0, 12); + }; + } else if (data === "node") { + // test salt generation with node random function + config.genSalt = genSalt.node; + } else { + // if value is not acceptable config gets reset. + config.genSalt = genSalt; + } +} + +function encrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd, /*Boolean*/ oldlike = false) { + // 1. test data if data is not Buffer we make buffer from it + if (data == null) data = Buffer.alloc(0); + // if data is not buffer be make buffer from it + if (!Buffer.isBuffer(data)) data = Buffer.from(data.toString()); + + // 2. We Initialize and generate encrypting function + const encrypter = make_encrypter(pwd); + + // 3. generate salt (12-bytes of random data) + const salt = config.genSalt(); + salt[11] = (header.crc >>> 24) & 0xff; + + // old implementations (before PKZip 2.04g) used two byte check + if (oldlike) salt[10] = (header.crc >>> 16) & 0xff; + + // 4. 
create output + const result = Buffer.alloc(data.length + 12); + encrypter(salt, result); + + // finally encode content + return encrypter(data, result, 12); +} + +module.exports = { decrypt, encrypt, _salter }; diff --git a/node_modules/adm-zip/package.json b/node_modules/adm-zip/package.json new file mode 100644 index 000000000..a6069e49a --- /dev/null +++ b/node_modules/adm-zip/package.json @@ -0,0 +1,48 @@ +{ + "name": "adm-zip", + "version": "0.5.9", + "description": "Javascript implementation of zip for nodejs with support for electron original-fs. Allows user to create or extract zip files both in memory or to/from disk", + "scripts": { + "test": "mocha -R spec", + "test:format": "npm run format:prettier:raw -- --check", + "format": "npm run format:prettier", + "format:prettier": "npm run format:prettier:raw -- --write", + "format:prettier:raw": "prettier \"**/*.{js,yml,json}\"" + }, + "keywords": [ + "zip", + "methods", + "archive", + "unzip" + ], + "homepage": "https://github.com/cthackers/adm-zip", + "author": "Nasca Iacob (https://github.com/cthackers)", + "bugs": { + "email": "sy@another-d-mention.ro", + "url": "https://github.com/cthackers/adm-zip/issues" + }, + "license": "MIT", + "files": [ + "adm-zip.js", + "headers", + "methods", + "util", + "zipEntry.js", + "zipFile.js", + "LICENSE" + ], + "main": "adm-zip.js", + "repository": { + "type": "git", + "url": "https://github.com/cthackers/adm-zip.git" + }, + "engines": { + "node": ">=6.0" + }, + "devDependencies": { + "chai": "^4.3.4", + "mocha": "^8.3.2", + "prettier": "^2.2.1", + "rimraf": "^3.0.2" + } +} diff --git a/node_modules/adm-zip/util/constants.js b/node_modules/adm-zip/util/constants.js new file mode 100644 index 000000000..119954bae --- /dev/null +++ b/node_modules/adm-zip/util/constants.js @@ -0,0 +1,142 @@ +module.exports = { + /* The local file header */ + LOCHDR : 30, // LOC header size + LOCSIG : 0x04034b50, // "PK\003\004" + LOCVER : 4, // version needed to extract + LOCFLG : 6, // general purpose bit flag + LOCHOW : 8, // compression method + LOCTIM : 10, // modification time (2 bytes time, 2 bytes date) + LOCCRC : 14, // uncompressed file crc-32 value + LOCSIZ : 18, // compressed size + LOCLEN : 22, // uncompressed size + LOCNAM : 26, // filename length + LOCEXT : 28, // extra field length + + /* The Data descriptor */ + EXTSIG : 0x08074b50, // "PK\007\008" + EXTHDR : 16, // EXT header size + EXTCRC : 4, // uncompressed file crc-32 value + EXTSIZ : 8, // compressed size + EXTLEN : 12, // uncompressed size + + /* The central directory file header */ + CENHDR : 46, // CEN header size + CENSIG : 0x02014b50, // "PK\001\002" + CENVEM : 4, // version made by + CENVER : 6, // version needed to extract + CENFLG : 8, // encrypt, decrypt flags + CENHOW : 10, // compression method + CENTIM : 12, // modification time (2 bytes time, 2 bytes date) + CENCRC : 16, // uncompressed file crc-32 value + CENSIZ : 20, // compressed size + CENLEN : 24, // uncompressed size + CENNAM : 28, // filename length + CENEXT : 30, // extra field length + CENCOM : 32, // file comment length + CENDSK : 34, // volume number start + CENATT : 36, // internal file attributes + CENATX : 38, // external file attributes (host system dependent) + CENOFF : 42, // LOC header offset + + /* The entries in the end of central directory */ + ENDHDR : 22, // END header size + ENDSIG : 0x06054b50, // "PK\005\006" + ENDSUB : 8, // number of entries on this disk + ENDTOT : 10, // total number of entries + ENDSIZ : 12, // central directory size in bytes + 
ENDOFF : 16, // offset of first CEN header + ENDCOM : 20, // zip file comment length + + END64HDR : 20, // zip64 END header size + END64SIG : 0x07064b50, // zip64 Locator signature, "PK\006\007" + END64START : 4, // number of the disk with the start of the zip64 + END64OFF : 8, // relative offset of the zip64 end of central directory + END64NUMDISKS : 16, // total number of disks + + ZIP64SIG : 0x06064b50, // zip64 signature, "PK\006\006" + ZIP64HDR : 56, // zip64 record minimum size + ZIP64LEAD : 12, // leading bytes at the start of the record, not counted by the value stored in ZIP64SIZE + ZIP64SIZE : 4, // zip64 size of the central directory record + ZIP64VEM : 12, // zip64 version made by + ZIP64VER : 14, // zip64 version needed to extract + ZIP64DSK : 16, // zip64 number of this disk + ZIP64DSKDIR : 20, // number of the disk with the start of the record directory + ZIP64SUB : 24, // number of entries on this disk + ZIP64TOT : 32, // total number of entries + ZIP64SIZB : 40, // zip64 central directory size in bytes + ZIP64OFF : 48, // offset of start of central directory with respect to the starting disk number + ZIP64EXTRA : 56, // extensible data sector + + /* Compression methods */ + STORED : 0, // no compression + SHRUNK : 1, // shrunk + REDUCED1 : 2, // reduced with compression factor 1 + REDUCED2 : 3, // reduced with compression factor 2 + REDUCED3 : 4, // reduced with compression factor 3 + REDUCED4 : 5, // reduced with compression factor 4 + IMPLODED : 6, // imploded + // 7 reserved for Tokenizing compression algorithm + DEFLATED : 8, // deflated + ENHANCED_DEFLATED: 9, // enhanced deflated + PKWARE : 10,// PKWare DCL imploded + // 11 reserved by PKWARE + BZIP2 : 12, // compressed using BZIP2 + // 13 reserved by PKWARE + LZMA : 14, // LZMA + // 15-17 reserved by PKWARE + IBM_TERSE : 18, // compressed using IBM TERSE + IBM_LZ77 : 19, // IBM LZ77 z + AES_ENCRYPT : 99, // WinZIP AES encryption method + + /* General purpose bit flag */ + // values can obtained with expression 2**bitnr + FLG_ENC : 1, // Bit 0: encrypted file + FLG_COMP1 : 2, // Bit 1, compression option + FLG_COMP2 : 4, // Bit 2, compression option + FLG_DESC : 8, // Bit 3, data descriptor + FLG_ENH : 16, // Bit 4, enhanced deflating + FLG_PATCH : 32, // Bit 5, indicates that the file is compressed patched data. + FLG_STR : 64, // Bit 6, strong encryption (patented) + // Bits 7-10: Currently unused. + FLG_EFS : 2048, // Bit 11: Language encoding flag (EFS) + // Bit 12: Reserved by PKWARE for enhanced compression. + // Bit 13: encrypted the Central Directory (patented). + // Bits 14-15: Reserved by PKWARE. 
+ FLG_MSK : 4096, // mask header values + + /* Load type */ + FILE : 2, + BUFFER : 1, + NONE : 0, + + /* 4.5 Extensible data fields */ + EF_ID : 0, + EF_SIZE : 2, + + /* Header IDs */ + ID_ZIP64 : 0x0001, + ID_AVINFO : 0x0007, + ID_PFS : 0x0008, + ID_OS2 : 0x0009, + ID_NTFS : 0x000a, + ID_OPENVMS : 0x000c, + ID_UNIX : 0x000d, + ID_FORK : 0x000e, + ID_PATCH : 0x000f, + ID_X509_PKCS7 : 0x0014, + ID_X509_CERTID_F : 0x0015, + ID_X509_CERTID_C : 0x0016, + ID_STRONGENC : 0x0017, + ID_RECORD_MGT : 0x0018, + ID_X509_PKCS7_RL : 0x0019, + ID_IBM1 : 0x0065, + ID_IBM2 : 0x0066, + ID_POSZIP : 0x4690, + + EF_ZIP64_OR_32 : 0xffffffff, + EF_ZIP64_OR_16 : 0xffff, + EF_ZIP64_SUNCOMP : 0, + EF_ZIP64_SCOMP : 8, + EF_ZIP64_RHO : 16, + EF_ZIP64_DSN : 24 +}; diff --git a/node_modules/adm-zip/util/errors.js b/node_modules/adm-zip/util/errors.js new file mode 100644 index 000000000..dde469bcf --- /dev/null +++ b/node_modules/adm-zip/util/errors.js @@ -0,0 +1,35 @@ +module.exports = { + /* Header error messages */ + INVALID_LOC: "Invalid LOC header (bad signature)", + INVALID_CEN: "Invalid CEN header (bad signature)", + INVALID_END: "Invalid END header (bad signature)", + + /* ZipEntry error messages*/ + NO_DATA: "Nothing to decompress", + BAD_CRC: "CRC32 checksum failed", + FILE_IN_THE_WAY: "There is a file in the way: %s", + UNKNOWN_METHOD: "Invalid/unsupported compression method", + + /* Inflater error messages */ + AVAIL_DATA: "inflate::Available inflate data did not terminate", + INVALID_DISTANCE: "inflate::Invalid literal/length or distance code in fixed or dynamic block", + TO_MANY_CODES: "inflate::Dynamic block code description: too many length or distance codes", + INVALID_REPEAT_LEN: "inflate::Dynamic block code description: repeat more than specified lengths", + INVALID_REPEAT_FIRST: "inflate::Dynamic block code description: repeat lengths with no first length", + INCOMPLETE_CODES: "inflate::Dynamic block code description: code lengths codes incomplete", + INVALID_DYN_DISTANCE: "inflate::Dynamic block code description: invalid distance code lengths", + INVALID_CODES_LEN: "inflate::Dynamic block code description: invalid literal/length code lengths", + INVALID_STORE_BLOCK: "inflate::Stored block length did not match one's complement", + INVALID_BLOCK_TYPE: "inflate::Invalid block type (type == 3)", + + /* ADM-ZIP error messages */ + CANT_EXTRACT_FILE: "Could not extract the file", + CANT_OVERRIDE: "Target file already exists", + NO_ZIP: "No zip file was loaded", + NO_ENTRY: "Entry doesn't exist", + DIRECTORY_CONTENT_ERROR: "A directory cannot have content", + FILE_NOT_FOUND: "File not found: %s", + NOT_IMPLEMENTED: "Not implemented", + INVALID_FILENAME: "Invalid filename", + INVALID_FORMAT: "Invalid or unsupported zip format. 
No END header found" +}; diff --git a/node_modules/adm-zip/util/fattr.js b/node_modules/adm-zip/util/fattr.js new file mode 100644 index 000000000..163e2e526 --- /dev/null +++ b/node_modules/adm-zip/util/fattr.js @@ -0,0 +1,79 @@ +const fs = require("./fileSystem").require(); +const pth = require("path"); + +fs.existsSync = fs.existsSync || pth.existsSync; + +module.exports = function (/*String*/ path) { + var _path = path || "", + _obj = newAttr(), + _stat = null; + + function newAttr() { + return { + directory: false, + readonly: false, + hidden: false, + executable: false, + mtime: 0, + atime: 0 + }; + } + + if (_path && fs.existsSync(_path)) { + _stat = fs.statSync(_path); + _obj.directory = _stat.isDirectory(); + _obj.mtime = _stat.mtime; + _obj.atime = _stat.atime; + _obj.executable = (0o111 & _stat.mode) !== 0; // file is executable who ever har right not just owner + _obj.readonly = (0o200 & _stat.mode) === 0; // readonly if owner has no write right + _obj.hidden = pth.basename(_path)[0] === "."; + } else { + console.warn("Invalid path: " + _path); + } + + return { + get directory() { + return _obj.directory; + }, + + get readOnly() { + return _obj.readonly; + }, + + get hidden() { + return _obj.hidden; + }, + + get mtime() { + return _obj.mtime; + }, + + get atime() { + return _obj.atime; + }, + + get executable() { + return _obj.executable; + }, + + decodeAttributes: function () {}, + + encodeAttributes: function () {}, + + toJSON: function () { + return { + path: _path, + isDirectory: _obj.directory, + isReadOnly: _obj.readonly, + isHidden: _obj.hidden, + isExecutable: _obj.executable, + mTime: _obj.mtime, + aTime: _obj.atime + }; + }, + + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); + } + }; +}; diff --git a/node_modules/adm-zip/util/fileSystem.js b/node_modules/adm-zip/util/fileSystem.js new file mode 100644 index 000000000..dee5c2afd --- /dev/null +++ b/node_modules/adm-zip/util/fileSystem.js @@ -0,0 +1,11 @@ +exports.require = function () { + if (typeof process === "object" && process.versions && process.versions["electron"]) { + try { + const originalFs = require("original-fs"); + if (Object.keys(originalFs).length > 0) { + return originalFs; + } + } catch (e) {} + } + return require("fs"); +}; diff --git a/node_modules/adm-zip/util/index.js b/node_modules/adm-zip/util/index.js new file mode 100644 index 000000000..6790df40c --- /dev/null +++ b/node_modules/adm-zip/util/index.js @@ -0,0 +1,4 @@ +module.exports = require("./utils"); +module.exports.Constants = require("./constants"); +module.exports.Errors = require("./errors"); +module.exports.FileAttr = require("./fattr"); diff --git a/node_modules/adm-zip/util/utils.js b/node_modules/adm-zip/util/utils.js new file mode 100644 index 000000000..360338cbc --- /dev/null +++ b/node_modules/adm-zip/util/utils.js @@ -0,0 +1,246 @@ +const fsystem = require("./fileSystem").require(); +const pth = require("path"); +const Constants = require("./constants"); +const isWin = typeof process === "object" && "win32" === process.platform; + +const is_Obj = (obj) => obj && typeof obj === "object"; + +// generate CRC32 lookup table +const crcTable = new Uint32Array(256).map((t, c) => { + for (let k = 0; k < 8; k++) { + if ((c & 1) !== 0) { + c = 0xedb88320 ^ (c >>> 1); + } else { + c >>>= 1; + } + } + return c >>> 0; +}); + +// UTILS functions + +function Utils(opts) { + this.sep = pth.sep; + this.fs = fsystem; + + if (is_Obj(opts)) { + // custom filesystem + if (is_Obj(opts.fs) && typeof opts.fs.statSync === 
"function") { + this.fs = opts.fs; + } + } +} + +module.exports = Utils; + +// INSTANCED functions + +Utils.prototype.makeDir = function (/*String*/ folder) { + const self = this; + + // Sync - make directories tree + function mkdirSync(/*String*/ fpath) { + let resolvedPath = fpath.split(self.sep)[0]; + fpath.split(self.sep).forEach(function (name) { + if (!name || name.substr(-1, 1) === ":") return; + resolvedPath += self.sep + name; + var stat; + try { + stat = self.fs.statSync(resolvedPath); + } catch (e) { + self.fs.mkdirSync(resolvedPath); + } + if (stat && stat.isFile()) throw Errors.FILE_IN_THE_WAY.replace("%s", resolvedPath); + }); + } + + mkdirSync(folder); +}; + +Utils.prototype.writeFileTo = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr) { + const self = this; + if (self.fs.existsSync(path)) { + if (!overwrite) return false; // cannot overwrite + + var stat = self.fs.statSync(path); + if (stat.isDirectory()) { + return false; + } + } + var folder = pth.dirname(path); + if (!self.fs.existsSync(folder)) { + self.makeDir(folder); + } + + var fd; + try { + fd = self.fs.openSync(path, "w", 438); // 0666 + } catch (e) { + self.fs.chmodSync(path, 438); + fd = self.fs.openSync(path, "w", 438); + } + if (fd) { + try { + self.fs.writeSync(fd, content, 0, content.length, 0); + } finally { + self.fs.closeSync(fd); + } + } + self.fs.chmodSync(path, attr || 438); + return true; +}; + +Utils.prototype.writeFileToAsync = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr, /*Function*/ callback) { + if (typeof attr === "function") { + callback = attr; + attr = undefined; + } + + const self = this; + + self.fs.exists(path, function (exist) { + if (exist && !overwrite) return callback(false); + + self.fs.stat(path, function (err, stat) { + if (exist && stat.isDirectory()) { + return callback(false); + } + + var folder = pth.dirname(path); + self.fs.exists(folder, function (exists) { + if (!exists) self.makeDir(folder); + + self.fs.open(path, "w", 438, function (err, fd) { + if (err) { + self.fs.chmod(path, 438, function () { + self.fs.open(path, "w", 438, function (err, fd) { + self.fs.write(fd, content, 0, content.length, 0, function () { + self.fs.close(fd, function () { + self.fs.chmod(path, attr || 438, function () { + callback(true); + }); + }); + }); + }); + }); + } else if (fd) { + self.fs.write(fd, content, 0, content.length, 0, function () { + self.fs.close(fd, function () { + self.fs.chmod(path, attr || 438, function () { + callback(true); + }); + }); + }); + } else { + self.fs.chmod(path, attr || 438, function () { + callback(true); + }); + } + }); + }); + }); + }); +}; + +Utils.prototype.findFiles = function (/*String*/ path) { + const self = this; + + function findSync(/*String*/ dir, /*RegExp*/ pattern, /*Boolean*/ recursive) { + if (typeof pattern === "boolean") { + recursive = pattern; + pattern = undefined; + } + let files = []; + self.fs.readdirSync(dir).forEach(function (file) { + var path = pth.join(dir, file); + + if (self.fs.statSync(path).isDirectory() && recursive) files = files.concat(findSync(path, pattern, recursive)); + + if (!pattern || pattern.test(path)) { + files.push(pth.normalize(path) + (self.fs.statSync(path).isDirectory() ? 
self.sep : "")); + } + }); + return files; + } + + return findSync(path, undefined, true); +}; + +Utils.prototype.getAttributes = function () {}; + +Utils.prototype.setAttributes = function () {}; + +// STATIC functions + +// crc32 single update (it is part of crc32) +Utils.crc32update = function (crc, byte) { + return crcTable[(crc ^ byte) & 0xff] ^ (crc >>> 8); +}; + +Utils.crc32 = function (buf) { + if (typeof buf === "string") { + buf = Buffer.from(buf, "utf8"); + } + // Generate crcTable + if (!crcTable.length) genCRCTable(); + + let len = buf.length; + let crc = ~0; + for (let off = 0; off < len; ) crc = Utils.crc32update(crc, buf[off++]); + // xor and cast as uint32 number + return ~crc >>> 0; +}; + +Utils.methodToString = function (/*Number*/ method) { + switch (method) { + case Constants.STORED: + return "STORED (" + method + ")"; + case Constants.DEFLATED: + return "DEFLATED (" + method + ")"; + default: + return "UNSUPPORTED (" + method + ")"; + } +}; + +// removes ".." style path elements +Utils.canonical = function (/*string*/ path) { + if (!path) return ""; + // trick normalize think path is absolute + var safeSuffix = pth.posix.normalize("/" + path.split("\\").join("/")); + return pth.join(".", safeSuffix); +}; + +// make abolute paths taking prefix as root folder +Utils.sanitize = function (/*string*/ prefix, /*string*/ name) { + prefix = pth.resolve(pth.normalize(prefix)); + var parts = name.split("/"); + for (var i = 0, l = parts.length; i < l; i++) { + var path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep))); + if (path.indexOf(prefix) === 0) { + return path; + } + } + return pth.normalize(pth.join(prefix, pth.basename(name))); +}; + +// converts buffer, Uint8Array, string types to buffer +Utils.toBuffer = function toBuffer(/*buffer, Uint8Array, string*/ input) { + if (Buffer.isBuffer(input)) { + return input; + } else if (input instanceof Uint8Array) { + return Buffer.from(input); + } else { + // expect string all other values are invalid and return empty buffer + return typeof input === "string" ? 
Buffer.from(input, "utf8") : Buffer.alloc(0); + } +}; + +Utils.readBigUInt64LE = function (/*Buffer*/ buffer, /*int*/ index) { + var slice = Buffer.from(buffer.slice(index, index + 8)); + slice.swap64(); + + return parseInt(`0x${slice.toString("hex")}`); +}; + +Utils.isWin = isWin; // Do we have windows system +Utils.crcTable = crcTable; diff --git a/node_modules/adm-zip/zipEntry.js b/node_modules/adm-zip/zipEntry.js new file mode 100644 index 000000000..8c3053b55 --- /dev/null +++ b/node_modules/adm-zip/zipEntry.js @@ -0,0 +1,333 @@ +var Utils = require("./util"), + Headers = require("./headers"), + Constants = Utils.Constants, + Methods = require("./methods"); + +module.exports = function (/*Buffer*/ input) { + var _entryHeader = new Headers.EntryHeader(), + _entryName = Buffer.alloc(0), + _comment = Buffer.alloc(0), + _isDirectory = false, + uncompressedData = null, + _extra = Buffer.alloc(0); + + function getCompressedDataFromZip() { + if (!input || !Buffer.isBuffer(input)) { + return Buffer.alloc(0); + } + _entryHeader.loadDataHeaderFromBinary(input); + return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize); + } + + function crc32OK(data) { + // if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the header is written + if ((_entryHeader.flags & 0x8) !== 0x8) { + if (Utils.crc32(data) !== _entryHeader.dataHeader.crc) { + return false; + } + } else { + // @TODO: load and check data descriptor header + // The fields in the local header are filled with zero, and the CRC-32 and size are appended in a 12-byte structure + // (optionally preceded by a 4-byte signature) immediately after the compressed data: + } + return true; + } + + function decompress(/*Boolean*/ async, /*Function*/ callback, /*String, Buffer*/ pass) { + if (typeof callback === "undefined" && typeof async === "string") { + pass = async; + async = void 0; + } + if (_isDirectory) { + if (async && callback) { + callback(Buffer.alloc(0), Utils.Errors.DIRECTORY_CONTENT_ERROR); //si added error. + } + return Buffer.alloc(0); + } + + var compressedData = getCompressedDataFromZip(); + + if (compressedData.length === 0) { + // File is empty, nothing to decompress. + if (async && callback) callback(compressedData); + return compressedData; + } + + if (_entryHeader.encripted) { + if ("string" !== typeof pass && !Buffer.isBuffer(pass)) { + throw new Error("ADM-ZIP: Incompatible password parameter"); + } + compressedData = Methods.ZipCrypto.decrypt(compressedData, _entryHeader, pass); + } + + var data = Buffer.alloc(_entryHeader.size); + + switch (_entryHeader.method) { + case Utils.Constants.STORED: + compressedData.copy(data); + if (!crc32OK(data)) { + if (async && callback) callback(data, Utils.Errors.BAD_CRC); //si added error + throw new Error(Utils.Errors.BAD_CRC); + } else { + //si added otherwise did not seem to return data. 
+ if (async && callback) callback(data); + return data; + } + case Utils.Constants.DEFLATED: + var inflater = new Methods.Inflater(compressedData); + if (!async) { + const result = inflater.inflate(data); + result.copy(data, 0); + if (!crc32OK(data)) { + throw new Error(Utils.Errors.BAD_CRC + " " + _entryName.toString()); + } + return data; + } else { + inflater.inflateAsync(function (result) { + result.copy(result, 0); + if (callback) { + if (!crc32OK(result)) { + callback(result, Utils.Errors.BAD_CRC); //si added error + } else { + callback(result); + } + } + }); + } + break; + default: + if (async && callback) callback(Buffer.alloc(0), Utils.Errors.UNKNOWN_METHOD); + throw new Error(Utils.Errors.UNKNOWN_METHOD); + } + } + + function compress(/*Boolean*/ async, /*Function*/ callback) { + if ((!uncompressedData || !uncompressedData.length) && Buffer.isBuffer(input)) { + // no data set or the data wasn't changed to require recompression + if (async && callback) callback(getCompressedDataFromZip()); + return getCompressedDataFromZip(); + } + + if (uncompressedData.length && !_isDirectory) { + var compressedData; + // Local file header + switch (_entryHeader.method) { + case Utils.Constants.STORED: + _entryHeader.compressedSize = _entryHeader.size; + + compressedData = Buffer.alloc(uncompressedData.length); + uncompressedData.copy(compressedData); + + if (async && callback) callback(compressedData); + return compressedData; + default: + case Utils.Constants.DEFLATED: + var deflater = new Methods.Deflater(uncompressedData); + if (!async) { + var deflated = deflater.deflate(); + _entryHeader.compressedSize = deflated.length; + return deflated; + } else { + deflater.deflateAsync(function (data) { + compressedData = Buffer.alloc(data.length); + _entryHeader.compressedSize = data.length; + data.copy(compressedData); + callback && callback(compressedData); + }); + } + deflater = null; + break; + } + } else if (async && callback) { + callback(Buffer.alloc(0)); + } else { + return Buffer.alloc(0); + } + } + + function readUInt64LE(buffer, offset) { + return (buffer.readUInt32LE(offset + 4) << 4) + buffer.readUInt32LE(offset); + } + + function parseExtra(data) { + var offset = 0; + var signature, size, part; + while (offset < data.length) { + signature = data.readUInt16LE(offset); + offset += 2; + size = data.readUInt16LE(offset); + offset += 2; + part = data.slice(offset, offset + size); + offset += size; + if (Constants.ID_ZIP64 === signature) { + parseZip64ExtendedInformation(part); + } + } + } + + //Override header field values with values from the ZIP64 extra field + function parseZip64ExtendedInformation(data) { + var size, compressedSize, offset, diskNumStart; + + if (data.length >= Constants.EF_ZIP64_SCOMP) { + size = readUInt64LE(data, Constants.EF_ZIP64_SUNCOMP); + if (_entryHeader.size === Constants.EF_ZIP64_OR_32) { + _entryHeader.size = size; + } + } + if (data.length >= Constants.EF_ZIP64_RHO) { + compressedSize = readUInt64LE(data, Constants.EF_ZIP64_SCOMP); + if (_entryHeader.compressedSize === Constants.EF_ZIP64_OR_32) { + _entryHeader.compressedSize = compressedSize; + } + } + if (data.length >= Constants.EF_ZIP64_DSN) { + offset = readUInt64LE(data, Constants.EF_ZIP64_RHO); + if (_entryHeader.offset === Constants.EF_ZIP64_OR_32) { + _entryHeader.offset = offset; + } + } + if (data.length >= Constants.EF_ZIP64_DSN + 4) { + diskNumStart = data.readUInt32LE(Constants.EF_ZIP64_DSN); + if (_entryHeader.diskNumStart === Constants.EF_ZIP64_OR_16) { + _entryHeader.diskNumStart = 
diskNumStart; + } + } + } + + return { + get entryName() { + return _entryName.toString(); + }, + get rawEntryName() { + return _entryName; + }, + set entryName(val) { + _entryName = Utils.toBuffer(val); + var lastChar = _entryName[_entryName.length - 1]; + _isDirectory = lastChar === 47 || lastChar === 92; + _entryHeader.fileNameLength = _entryName.length; + }, + + get extra() { + return _extra; + }, + set extra(val) { + _extra = val; + _entryHeader.extraLength = val.length; + parseExtra(val); + }, + + get comment() { + return _comment.toString(); + }, + set comment(val) { + _comment = Utils.toBuffer(val); + _entryHeader.commentLength = _comment.length; + }, + + get name() { + var n = _entryName.toString(); + return _isDirectory + ? n + .substr(n.length - 1) + .split("/") + .pop() + : n.split("/").pop(); + }, + get isDirectory() { + return _isDirectory; + }, + + getCompressedData: function () { + return compress(false, null); + }, + + getCompressedDataAsync: function (/*Function*/ callback) { + compress(true, callback); + }, + + setData: function (value) { + uncompressedData = Utils.toBuffer(value); + if (!_isDirectory && uncompressedData.length) { + _entryHeader.size = uncompressedData.length; + _entryHeader.method = Utils.Constants.DEFLATED; + _entryHeader.crc = Utils.crc32(value); + _entryHeader.changed = true; + } else { + // folders and blank files should be stored + _entryHeader.method = Utils.Constants.STORED; + } + }, + + getData: function (pass) { + if (_entryHeader.changed) { + return uncompressedData; + } else { + return decompress(false, null, pass); + } + }, + + getDataAsync: function (/*Function*/ callback, pass) { + if (_entryHeader.changed) { + callback(uncompressedData); + } else { + decompress(true, callback, pass); + } + }, + + set attr(attr) { + _entryHeader.attr = attr; + }, + get attr() { + return _entryHeader.attr; + }, + + set header(/*Buffer*/ data) { + _entryHeader.loadFromBinary(data); + }, + + get header() { + return _entryHeader; + }, + + packHeader: function () { + // 1. create header (buffer) + var header = _entryHeader.entryHeaderToBinary(); + var addpos = Utils.Constants.CENHDR; + // 2. add file name + _entryName.copy(header, addpos); + addpos += _entryName.length; + // 3. add extra data + if (_entryHeader.extraLength) { + _extra.copy(header, addpos); + addpos += _entryHeader.extraLength; + } + // 4. 
add file comment + if (_entryHeader.commentLength) { + _comment.copy(header, addpos); + } + return header; + }, + + toJSON: function () { + const bytes = function (nr) { + return "<" + ((nr && nr.length + " bytes buffer") || "null") + ">"; + }; + + return { + entryName: this.entryName, + name: this.name, + comment: this.comment, + isDirectory: this.isDirectory, + header: _entryHeader.toJSON(), + compressedData: bytes(input), + data: bytes(uncompressedData) + }; + }, + + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); + } + }; +}; diff --git a/node_modules/adm-zip/zipFile.js b/node_modules/adm-zip/zipFile.js new file mode 100644 index 000000000..997226a25 --- /dev/null +++ b/node_modules/adm-zip/zipFile.js @@ -0,0 +1,384 @@ +const ZipEntry = require("./zipEntry"); +const Headers = require("./headers"); +const Utils = require("./util"); + +module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { + var entryList = [], + entryTable = {}, + _comment = Buffer.alloc(0), + mainHeader = new Headers.MainHeader(), + loadedEntries = false; + + // assign options + const opts = Object.assign(Object.create(null), options); + + const { noSort } = opts; + + if (inBuffer) { + // is a memory buffer + readMainHeader(opts.readEntries); + } else { + // none. is a new file + loadedEntries = true; + } + + function iterateEntries(callback) { + const totalEntries = mainHeader.diskEntries; // total number of entries + let index = mainHeader.offset; // offset of first CEN header + + for (let i = 0; i < totalEntries; i++) { + let tmp = index; + const entry = new ZipEntry(inBuffer); + + entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR)); + entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength)); + + index += entry.header.entryHeaderSize; + + callback(entry); + } + } + + function readEntries() { + loadedEntries = true; + entryTable = {}; + entryList = new Array(mainHeader.diskEntries); // total number of entries + var index = mainHeader.offset; // offset of first CEN header + for (var i = 0; i < entryList.length; i++) { + var tmp = index, + entry = new ZipEntry(inBuffer); + entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR)); + + entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength)); + + if (entry.header.extraLength) { + entry.extra = inBuffer.slice(tmp, (tmp += entry.header.extraLength)); + } + + if (entry.header.commentLength) entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength); + + index += entry.header.entryHeaderSize; + + entryList[i] = entry; + entryTable[entry.entryName] = entry; + } + } + + function readMainHeader(/*Boolean*/ readNow) { + var i = inBuffer.length - Utils.Constants.ENDHDR, // END header size + max = Math.max(0, i - 0xffff), // 0xFFFF is the max zip file comment length + n = max, + endStart = inBuffer.length, + endOffset = -1, // Start offset of the END header + commentEnd = 0; + + for (i; i >= n; i--) { + if (inBuffer[i] !== 0x50) continue; // quick check that the byte is 'P' + if (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) { + // "PK\005\006" + endOffset = i; + commentEnd = i; + endStart = i + Utils.Constants.ENDHDR; + // We already found a regular signature, let's look just a bit further to check if there's any zip64 signature + n = i - Utils.Constants.END64HDR; + continue; + } + + if (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) { + // Found a zip64 signature, let's continue reading the whole zip64 record + n = max; + continue; + } + + 
if (inBuffer.readUInt32LE(i) === Utils.Constants.ZIP64SIG) { + // Found the zip64 record, let's determine it's size + endOffset = i; + endStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD; + break; + } + } + + if (!~endOffset) throw new Error(Utils.Errors.INVALID_FORMAT); + + mainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart)); + if (mainHeader.commentLength) { + _comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR); + } + if (readNow) readEntries(); + } + + function sortEntries() { + if (entryList.length > 1 && !noSort) { + entryList.sort((a, b) => a.entryName.toLowerCase().localeCompare(b.entryName.toLowerCase())); + } + } + + return { + /** + * Returns an array of ZipEntry objects existent in the current opened archive + * @return Array + */ + get entries() { + if (!loadedEntries) { + readEntries(); + } + return entryList; + }, + + /** + * Archive comment + * @return {String} + */ + get comment() { + return _comment.toString(); + }, + set comment(val) { + _comment = Utils.toBuffer(val); + mainHeader.commentLength = _comment.length; + }, + + getEntryCount: function () { + if (!loadedEntries) { + return mainHeader.diskEntries; + } + + return entryList.length; + }, + + forEach: function (callback) { + if (!loadedEntries) { + iterateEntries(callback); + return; + } + + entryList.forEach(callback); + }, + + /** + * Returns a reference to the entry with the given name or null if entry is inexistent + * + * @param entryName + * @return ZipEntry + */ + getEntry: function (/*String*/ entryName) { + if (!loadedEntries) { + readEntries(); + } + return entryTable[entryName] || null; + }, + + /** + * Adds the given entry to the entry list + * + * @param entry + */ + setEntry: function (/*ZipEntry*/ entry) { + if (!loadedEntries) { + readEntries(); + } + entryList.push(entry); + entryTable[entry.entryName] = entry; + mainHeader.totalEntries = entryList.length; + }, + + /** + * Removes the entry with the given name from the entry list. + * + * If the entry is a directory, then all nested files and directories will be removed + * @param entryName + */ + deleteEntry: function (/*String*/ entryName) { + if (!loadedEntries) { + readEntries(); + } + var entry = entryTable[entryName]; + if (entry && entry.isDirectory) { + var _self = this; + this.getEntryChildren(entry).forEach(function (child) { + if (child.entryName !== entryName) { + _self.deleteEntry(child.entryName); + } + }); + } + entryList.splice(entryList.indexOf(entry), 1); + delete entryTable[entryName]; + mainHeader.totalEntries = entryList.length; + }, + + /** + * Iterates and returns all nested files and directories of the given entry + * + * @param entry + * @return Array + */ + getEntryChildren: function (/*ZipEntry*/ entry) { + if (!loadedEntries) { + readEntries(); + } + if (entry && entry.isDirectory) { + const list = []; + const name = entry.entryName; + const len = name.length; + + entryList.forEach(function (zipEntry) { + if (zipEntry.entryName.substr(0, len) === name) { + list.push(zipEntry); + } + }); + return list; + } + return []; + }, + + /** + * Returns the zip file + * + * @return Buffer + */ + compressToBuffer: function () { + if (!loadedEntries) { + readEntries(); + } + sortEntries(); + + const dataBlock = []; + const entryHeaders = []; + let totalSize = 0; + let dindex = 0; + + mainHeader.size = 0; + mainHeader.offset = 0; + + for (const entry of entryList) { + // compress data and set local and entry header accordingly. 
Reason why is called first + const compressedData = entry.getCompressedData(); + // 1. construct data header + entry.header.offset = dindex; + const dataHeader = entry.header.dataHeaderToBinary(); + const entryNameLen = entry.rawEntryName.length; + // 1.2. postheader - data after data header + const postHeader = Buffer.alloc(entryNameLen + entry.extra.length); + entry.rawEntryName.copy(postHeader, 0); + postHeader.copy(entry.extra, entryNameLen); + + // 2. offsets + const dataLength = dataHeader.length + postHeader.length + compressedData.length; + dindex += dataLength; + + // 3. store values in sequence + dataBlock.push(dataHeader); + dataBlock.push(postHeader); + dataBlock.push(compressedData); + + // 4. construct entry header + const entryHeader = entry.packHeader(); + entryHeaders.push(entryHeader); + // 5. update main header + mainHeader.size += entryHeader.length; + totalSize += dataLength + entryHeader.length; + } + + totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length + // point to end of data and beginning of central directory first record + mainHeader.offset = dindex; + + dindex = 0; + const outBuffer = Buffer.alloc(totalSize); + // write data blocks + for (const content of dataBlock) { + content.copy(outBuffer, dindex); + dindex += content.length; + } + + // write central directory entries + for (const content of entryHeaders) { + content.copy(outBuffer, dindex); + dindex += content.length; + } + + // write main header + const mh = mainHeader.toBinary(); + if (_comment) { + _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment + } + mh.copy(outBuffer, dindex); + + return outBuffer; + }, + + toAsyncBuffer: function (/*Function*/ onSuccess, /*Function*/ onFail, /*Function*/ onItemStart, /*Function*/ onItemEnd) { + try { + if (!loadedEntries) { + readEntries(); + } + sortEntries(); + + const dataBlock = []; + const entryHeaders = []; + let totalSize = 0; + let dindex = 0; + + mainHeader.size = 0; + mainHeader.offset = 0; + + const compress2Buffer = function (entryLists) { + if (entryLists.length) { + const entry = entryLists.pop(); + const name = entry.entryName + entry.extra.toString(); + if (onItemStart) onItemStart(name); + entry.getCompressedDataAsync(function (compressedData) { + if (onItemEnd) onItemEnd(name); + + entry.header.offset = dindex; + // data header + const dataHeader = entry.header.dataHeaderToBinary(); + const postHeader = Buffer.alloc(name.length, name); + const dataLength = dataHeader.length + postHeader.length + compressedData.length; + + dindex += dataLength; + + dataBlock.push(dataHeader); + dataBlock.push(postHeader); + dataBlock.push(compressedData); + + const entryHeader = entry.packHeader(); + entryHeaders.push(entryHeader); + mainHeader.size += entryHeader.length; + totalSize += dataLength + entryHeader.length; + + compress2Buffer(entryLists); + }); + } else { + totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length + // point to end of data and beginning of central directory first record + mainHeader.offset = dindex; + + dindex = 0; + const outBuffer = Buffer.alloc(totalSize); + dataBlock.forEach(function (content) { + content.copy(outBuffer, dindex); // write data blocks + dindex += content.length; + }); + entryHeaders.forEach(function (content) { + content.copy(outBuffer, dindex); // write central directory entries + dindex += content.length; + }); + + const mh = mainHeader.toBinary(); + if (_comment) { + _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment + } + + 
mh.copy(outBuffer, dindex); // write main header + + onSuccess(outBuffer); + } + }; + + compress2Buffer(entryList); + } catch (e) { + onFail(e); + } + } + }; +}; diff --git a/package-lock.json b/package-lock.json index d169a816b..9301ccf30 100644 --- a/package-lock.json +++ b/package-lock.json @@ -18,6 +18,7 @@ "@chrisgavin/safe-which": "^1.0.2", "@octokit/plugin-retry": "^3.0.9", "@octokit/types": "^6.21.1", + "adm-zip": "^0.5.9", "commander": "^8.1.0", "console-log-level": "^1.4.1", "del": "^6.0.0", @@ -38,6 +39,7 @@ }, "devDependencies": { "@ava/typescript": "3.0.1", + "@types/adm-zip": "^0.5.0", "@types/js-yaml": "^4.0.5", "@types/long": "4.0.1", "@types/node": "16.11.22", @@ -517,6 +519,15 @@ "integrity": "sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ==", "dev": true }, + "node_modules/@types/adm-zip": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/@types/adm-zip/-/adm-zip-0.5.0.tgz", + "integrity": "sha512-FCJBJq9ODsQZUNURo5ILAQueuA8WJhRvuihS3ke2iI25mJlfV2LK8jG2Qj2z2AWg8U0FtWWqBHVRetceLskSaw==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/color-name": { "version": "1.1.1", "integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==", @@ -852,6 +863,14 @@ "node": ">=0.4.0" } }, + "node_modules/adm-zip": { + "version": "0.5.9", + "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.9.tgz", + "integrity": "sha512-s+3fXLkeeLjZ2kLjCBwQufpI5fuN+kIGBxu6530nVQZGVol0d7Y/M88/xw9HGGUcJjKf8LutN3VPRUBq6N7Ajg==", + "engines": { + "node": ">=6.0" + } + }, "node_modules/aggregate-error": { "version": "3.0.1", "integrity": "sha512-quoaXsZ9/BLNae5yiNoUz+Nhkwz83GhWwtYFglcjEQB2NDHCIpApbqXxIFnm4Pq/Nvhrsq5sYJFyohrrxnTGAA==", @@ -5887,6 +5906,15 @@ "integrity": "sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ==", "dev": true }, + "@types/adm-zip": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/@types/adm-zip/-/adm-zip-0.5.0.tgz", + "integrity": "sha512-FCJBJq9ODsQZUNURo5ILAQueuA8WJhRvuihS3ke2iI25mJlfV2LK8jG2Qj2z2AWg8U0FtWWqBHVRetceLskSaw==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, "@types/color-name": { "version": "1.1.1", "integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==", @@ -6096,6 +6124,11 @@ "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==", "dev": true }, + "adm-zip": { + "version": "0.5.9", + "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.9.tgz", + "integrity": "sha512-s+3fXLkeeLjZ2kLjCBwQufpI5fuN+kIGBxu6530nVQZGVol0d7Y/M88/xw9HGGUcJjKf8LutN3VPRUBq6N7Ajg==" + }, "aggregate-error": { "version": "3.0.1", "integrity": "sha512-quoaXsZ9/BLNae5yiNoUz+Nhkwz83GhWwtYFglcjEQB2NDHCIpApbqXxIFnm4Pq/Nvhrsq5sYJFyohrrxnTGAA==", diff --git a/package.json b/package.json index 80036aad9..da7b98bd9 100644 --- a/package.json +++ b/package.json @@ -30,6 +30,7 @@ "@chrisgavin/safe-which": "^1.0.2", "@octokit/plugin-retry": "^3.0.9", "@octokit/types": "^6.21.1", + "adm-zip": "^0.5.9", "commander": "^8.1.0", "console-log-level": "^1.4.1", "del": "^6.0.0", @@ -53,6 +54,7 @@ ], "devDependencies": { "@ava/typescript": "3.0.1", + "@types/adm-zip": "^0.5.0", "@types/js-yaml": "^4.0.5", "@types/long": "4.0.1", "@types/node": "16.11.22", diff --git a/src/init-action-cleanup.ts b/src/init-action-cleanup.ts index e775c682c..9968b3b48 100644 --- 
a/src/init-action-cleanup.ts +++ b/src/init-action-cleanup.ts @@ -2,6 +2,8 @@ import * as fs from "fs"; import * as path from "path"; import * as core from "@actions/core"; +import AdmZip from "adm-zip"; +import del from "del"; import * as actionsUtil from "./actions-util"; import { dbIsFinalized } from "./analyze"; @@ -32,7 +34,24 @@ export async function uploadDatabaseBundleDebugArtifact( core.info( `${config.debugDatabaseName}-${language} is not finalized. Uploading partial database bundle...` ); - // TODO(angelapwen): Zip up files and upload directly. + // Zip up files and upload directly. + const databasePath = getCodeQLDatabasePath(config, language); + const databaseBundlePath = path.resolve( + config.dbLocation, + `${config.debugDatabaseName}.zip` + ); + // See `bundleDb` for explanation behind deleting existing db bundle. + if (fs.existsSync(databaseBundlePath)) { + await del(databaseBundlePath, { force: true }); + } + const zip = new AdmZip(); + zip.addLocalFolder(databasePath); + zip.writeZip(databaseBundlePath); + await actionsUtil.uploadDebugArtifacts( + [databaseBundlePath], + config.dbLocation, + config.debugArtifactName + ); continue; } try {
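For context, the new partial-bundle path in `uploadDatabaseBundleDebugArtifact` boils down to three steps: delete any stale bundle left at the target path, zip the (unfinalized) database directory with adm-zip, and hand the resulting file to the debug-artifact uploader. Below is a minimal standalone sketch of the zip step under stated assumptions: the `bundleUnfinalizedDatabase` helper name and its parameters are illustrative and not part of this PR; only the adm-zip and del calls (`addLocalFolder`, `writeZip`, `del(..., { force: true })`) mirror what the diff actually adds.

```typescript
import * as fs from "fs";
import * as path from "path";
import AdmZip from "adm-zip";
import del from "del";

// Illustrative helper (not part of this PR): zip an unfinalized CodeQL
// database directory into `<dbLocation>/<bundleName>.zip` and return the
// bundle path so the caller can upload it as a debug artifact.
async function bundleUnfinalizedDatabase(
  databasePath: string,
  dbLocation: string,
  bundleName: string
): Promise<string> {
  const databaseBundlePath = path.resolve(dbLocation, `${bundleName}.zip`);

  // As with `bundleDb`, remove any bundle left over from a previous attempt
  // so adm-zip writes a fresh archive rather than reusing a stale file.
  if (fs.existsSync(databaseBundlePath)) {
    await del(databaseBundlePath, { force: true });
  }

  const zip = new AdmZip();
  zip.addLocalFolder(databasePath); // recursively adds the directory contents
  zip.writeZip(databaseBundlePath); // synchronously writes the archive to disk

  return databaseBundlePath;
}
```

In the Action itself, the resulting path is then passed to `actionsUtil.uploadDebugArtifacts([databaseBundlePath], config.dbLocation, config.debugArtifactName)`, mirroring the upload call used for finalized databases.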