const ZipEntry = require("./zipEntry");
const Headers = require("./headers");
const Utils = require("./util");

module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) {
    var entryList = [],
        entryTable = {},
        _comment = Buffer.alloc(0),
        mainHeader = new Headers.MainHeader(),
        loadedEntries = false;
    var password = null;
    const temporary = new Set();

    // assign options
    const opts = options;

    const { noSort, decoder } = opts;

    if (inBuffer) {
        // is a memory buffer
        readMainHeader(opts.readEntries);
    } else {
        // none. is a new file
        loadedEntries = true;
    }

    function makeTemporaryFolders() {
        const foldersList = new Set();

        // Make list of all folders in file
        for (const elem of Object.keys(entryTable)) {
            const elements = elem.split("/");
            elements.pop(); // filename
            if (!elements.length) continue; // no folders
            for (let i = 0; i < elements.length; i++) {
                const sub = elements.slice(0, i + 1).join("/") + "/";
                foldersList.add(sub);
            }
        }

        // create missing folders as temporary
        for (const elem of foldersList) {
            if (!(elem in entryTable)) {
                const tempfolder = new ZipEntry(opts);
                tempfolder.entryName = elem;
                tempfolder.attr = 0x10;
                tempfolder.temporary = true;
                entryList.push(tempfolder);
                entryTable[tempfolder.entryName] = tempfolder;
                temporary.add(tempfolder);
            }
        }
    }

    function readEntries() {
        loadedEntries = true;
        entryTable = {};
        if (mainHeader.diskEntries > (inBuffer.length - mainHeader.offset) / Utils.Constants.CENHDR) {
            throw Utils.Errors.DISK_ENTRY_TOO_LARGE();
        }
        entryList = new Array(mainHeader.diskEntries); // total number of entries
        var index = mainHeader.offset; // offset of first CEN header
        for (var i = 0; i < entryList.length; i++) {
            var tmp = index,
                entry = new ZipEntry(opts, inBuffer);
            entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR));

            entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength));

            if (entry.header.extraLength) {
                entry.extra = inBuffer.slice(tmp, (tmp += entry.header.extraLength));
            }

            if (entry.header.commentLength) entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength);

            index += entry.header.centralHeaderSize;

            entryList[i] = entry;
            entryTable[entry.entryName] = entry;
        }

        temporary.clear();
        makeTemporaryFolders();
    }
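    // The End of Central Directory (EOCD) record is located by scanning
    // backwards from the end of the buffer for the "PK\005\006" signature
    // (0x06054b50 little-endian). A minimal sketch of that scan in isolation,
    // assuming a Node.js Buffer `buf` (illustrative only, not part of this module):
    //
    //   const ENDSIG = 0x06054b50; // same value as Utils.Constants.ENDSIG
    //   let eocd = -1;
    //   // EOCD is 22 bytes (ENDHDR) plus at most a 0xFFFF-byte archive comment
    //   for (let i = buf.length - 22; i >= Math.max(0, buf.length - 22 - 0xffff); i--) {
    //       if (buf[i] === 0x50 && buf.readUInt32LE(i) === ENDSIG) {
    //           eocd = i;
    //           break;
    //       }
    //   }
    //
    // The real implementation below additionally watches for the ZIP64 EOCD
    // record and locator signatures while scanning.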
    function readMainHeader(/*Boolean*/ readNow) {
        var i = inBuffer.length - Utils.Constants.ENDHDR, // END header size
            max = Math.max(0, i - 0xffff), // 0xFFFF is the max zip file comment length
            n = max,
            endStart = inBuffer.length,
            endOffset = -1, // Start offset of the END header
            commentEnd = 0;

        // option to search header from entire file
        const trailingSpace = typeof opts.trailingSpace === "boolean" ? opts.trailingSpace : false;
        if (trailingSpace) max = 0;

        for (i; i >= n; i--) {
            if (inBuffer[i] !== 0x50) continue; // quick check that the byte is 'P'
            if (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) {
                // "PK\005\006"
                endOffset = i;
                commentEnd = i;
                endStart = i + Utils.Constants.ENDHDR;
                // We already found a regular signature, let's look just a bit further to check if there's any zip64 signature
                n = i - Utils.Constants.END64HDR;
                continue;
            }

            if (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) {
                // Found a zip64 signature, let's continue reading the whole zip64 record
                n = max;
                continue;
            }

            if (inBuffer.readUInt32LE(i) === Utils.Constants.ZIP64SIG) {
                // Found the zip64 record, let's determine its size
                endOffset = i;
                endStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD;
                break;
            }
        }

        if (endOffset == -1) throw Utils.Errors.INVALID_FORMAT();

        mainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart));
        if (mainHeader.commentLength) {
            _comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR);
        }
        if (readNow) readEntries();
    }

    function sortEntries() {
        if (entryList.length > 1 && !noSort) {
            entryList.sort((a, b) => a.entryName.toLowerCase().localeCompare(b.entryName.toLowerCase()));
        }
    }

    return {
        /**
         * Returns an array of ZipEntry objects existing in the currently opened archive
         * @return Array
         */
        get entries() {
            if (!loadedEntries) {
                readEntries();
            }
            return entryList.filter((e) => !temporary.has(e));
        },

        /**
         * Archive comment
         * @return {String}
         */
        get comment() {
            return decoder.decode(_comment);
        },
        set comment(val) {
            _comment = Utils.toBuffer(val, decoder.encode);
            mainHeader.commentLength = _comment.length;
        },

        getEntryCount: function () {
            if (!loadedEntries) {
                return mainHeader.diskEntries;
            }

            return entryList.length;
        },

        forEach: function (callback) {
            this.entries.forEach(callback);
        },

        /**
         * Returns a reference to the entry with the given name, or null if the entry does not exist
         *
         * @param entryName
         * @return ZipEntry
         */
        getEntry: function (/*String*/ entryName) {
            if (!loadedEntries) {
                readEntries();
            }
            return entryTable[entryName] || null;
        },

        /**
         * Adds the given entry to the entry list
         *
         * @param entry
         */
        setEntry: function (/*ZipEntry*/ entry) {
            if (!loadedEntries) {
                readEntries();
            }
            entryList.push(entry);
            entryTable[entry.entryName] = entry;
            mainHeader.totalEntries = entryList.length;
        },

        /**
         * Removes the file with the given name from the entry list.
         *
         * If the entry is a directory, then all nested files and directories will be removed
         * @param entryName
         * @returns {void}
         */
        deleteFile: function (/*String*/ entryName, withsubfolders = true) {
            if (!loadedEntries) {
                readEntries();
            }
            const entry = entryTable[entryName];
            const list = this.getEntryChildren(entry, withsubfolders).map((child) => child.entryName);
            list.forEach(this.deleteEntry);
        },
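        // Usage sketch for deleteFile (illustrative; the entry names are hypothetical):
        //
        //   zip.deleteFile("docs/");        // removes "docs/" and every entry nested under it
        //   zip.deleteFile("docs/", false); // removes only the "docs/" entry itself
        //
        // Nesting is resolved by the prefix match in getEntryChildren below.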
        /**
         * Removes the entry with the given name from the entry list.
         *
         * @param {string} entryName
         * @returns {void}
         */
        deleteEntry: function (/*String*/ entryName) {
            if (!loadedEntries) {
                readEntries();
            }
            const entry = entryTable[entryName];
            const index = entryList.indexOf(entry);
            if (index >= 0) {
                entryList.splice(index, 1);
                delete entryTable[entryName];
                mainHeader.totalEntries = entryList.length;
            }
        },

        /**
         * Iterates and returns all nested files and directories of the given entry
         *
         * @param entry
         * @return Array
         */
        getEntryChildren: function (/*ZipEntry*/ entry, subfolders = true) {
            if (!loadedEntries) {
                readEntries();
            }
            if (typeof entry === "object") {
                if (entry.isDirectory && subfolders) {
                    const list = [];
                    const name = entry.entryName;

                    for (const zipEntry of entryList) {
                        if (zipEntry.entryName.startsWith(name)) {
                            list.push(zipEntry);
                        }
                    }
                    return list;
                } else {
                    return [entry];
                }
            }
            return [];
        },

        /**
         * How many child elements the entry has
         *
         * @param {ZipEntry} entry
         * @return {number}
         */
        getChildCount: function (entry) {
            if (entry && entry.isDirectory) {
                const list = this.getEntryChildren(entry);
                return list.includes(entry) ? list.length - 1 : list.length;
            }
            return 0;
        },
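        // The serialization methods below write the standard ZIP layout, in order:
        //
        //   [local file header + compressed data]   ...one pair per entry
        //   [central directory header]              ...one per entry
        //   [end of central directory record (+ optional archive comment)]
        //
        // Each central directory header must record the byte offset of its
        // matching local header, which is why the data pass runs first and
        // tracks `dindex` before the headers are written out.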
        /**
         * Returns the zip file
         *
         * @return Buffer
         */
        compressToBuffer: function () {
            if (!loadedEntries) {
                readEntries();
            }
            sortEntries();

            const dataBlock = [];
            const headerBlocks = [];
            let totalSize = 0;
            let dindex = 0;

            mainHeader.size = 0;
            mainHeader.offset = 0;
            let totalEntries = 0;

            for (const entry of this.entries) {
                // compress data and set local and entry header accordingly; this is why it is called first
                const compressedData = entry.getCompressedData();
                entry.header.offset = dindex;

                // 1. construct local header
                const localHeader = entry.packLocalHeader();

                // 2. offsets
                const dataLength = localHeader.length + compressedData.length;
                dindex += dataLength;

                // 3. store values in sequence
                dataBlock.push(localHeader);
                dataBlock.push(compressedData);

                // 4. construct central header
                const centralHeader = entry.packCentralHeader();
                headerBlocks.push(centralHeader);
                // 5. update main header
                mainHeader.size += centralHeader.length;
                totalSize += dataLength + centralHeader.length;
                totalEntries++;
            }

            totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length
            // point to end of data and beginning of central directory first record
            mainHeader.offset = dindex;
            mainHeader.totalEntries = totalEntries;

            dindex = 0;
            const outBuffer = Buffer.alloc(totalSize);
            // write data blocks
            for (const content of dataBlock) {
                content.copy(outBuffer, dindex);
                dindex += content.length;
            }

            // write central directory entries
            for (const content of headerBlocks) {
                content.copy(outBuffer, dindex);
                dindex += content.length;
            }

            // write main header
            const mh = mainHeader.toBinary();
            if (_comment) {
                _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment
            }
            mh.copy(outBuffer, dindex);

            // Since we update entry and main header offsets,
            // they are no longer valid and we have to reset content
            // (Issue 64)

            inBuffer = outBuffer;
            loadedEntries = false;

            return outBuffer;
        },

        toAsyncBuffer: function (/*Function*/ onSuccess, /*Function*/ onFail, /*Function*/ onItemStart, /*Function*/ onItemEnd) {
            try {
                if (!loadedEntries) {
                    readEntries();
                }
                sortEntries();

                const dataBlock = [];
                const centralHeaders = [];
                let totalSize = 0;
                let dindex = 0;
                let totalEntries = 0;

                mainHeader.size = 0;
                mainHeader.offset = 0;

                const compress2Buffer = function (entryLists) {
                    if (entryLists.length > 0) {
                        const entry = entryLists.shift();
                        const name = entry.entryName + entry.extra.toString();
                        if (onItemStart) onItemStart(name);
                        entry.getCompressedDataAsync(function (compressedData) {
                            if (onItemEnd) onItemEnd(name);
                            entry.header.offset = dindex;

                            // 1. construct local header
                            const localHeader = entry.packLocalHeader();

                            // 2. offsets
                            const dataLength = localHeader.length + compressedData.length;
                            dindex += dataLength;

                            // 3. store values in sequence
                            dataBlock.push(localHeader);
                            dataBlock.push(compressedData);

                            // central header
                            const centralHeader = entry.packCentralHeader();
                            centralHeaders.push(centralHeader);
                            mainHeader.size += centralHeader.length;
                            totalSize += dataLength + centralHeader.length;
                            totalEntries++;

                            compress2Buffer(entryLists);
                        });
                    } else {
                        totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length
                        // point to end of data and beginning of central directory first record
                        mainHeader.offset = dindex;
                        mainHeader.totalEntries = totalEntries;

                        dindex = 0;
                        const outBuffer = Buffer.alloc(totalSize);
                        dataBlock.forEach(function (content) {
                            content.copy(outBuffer, dindex); // write data blocks
                            dindex += content.length;
                        });
                        centralHeaders.forEach(function (content) {
                            content.copy(outBuffer, dindex); // write central directory entries
                            dindex += content.length;
                        });

                        const mh = mainHeader.toBinary();
                        if (_comment) {
                            _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment
                        }

                        mh.copy(outBuffer, dindex); // write main header

                        // Since we update entry and main header offsets, they are no
                        // longer valid and we have to reset content using our new buffer
                        // (Issue 64)

                        inBuffer = outBuffer;
                        loadedEntries = false;

                        onSuccess(outBuffer);
                    }
                };

                compress2Buffer(Array.from(this.entries));
            } catch (e) {
                onFail(e);
            }
        }
    };
};
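// Consumption sketch (illustrative; assumes this internal module is driven the
// way a public wrapper would drive it, and that `myDecoder` is a hypothetical
// { encode, decode } pair matching what the `decoder` option expects):
//
//   const ZipFile = require("./zipFile");
//   const fs = require("fs");
//
//   const zip = ZipFile(fs.readFileSync("archive.zip"), {
//       decoder: myDecoder,
//       readEntries: true,
//       noSort: false
//   });
//   for (const entry of zip.entries) console.log(entry.entryName);
//   const rebuilt = zip.compressToBuffer(); // re-serializes the archive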