From a10059fda3f6235afdd8e612fc64fc3e82639fb0 Mon Sep 17 00:00:00 2001 From: Chris Talkington Date: Thu, 17 Oct 2024 22:58:32 -0500 Subject: [PATCH] esm (#790) --- .github/workflows/nodejs.yml | 24 +- .github/workflows/npmpublish.yml | 4 +- .prettierignore | 3 + CHANGELOG.md | 2 + CONTRIBUTING.md | 11 +- README.md | 51 +- benchmark/common.js | 4 +- benchmark/simple/pack-zip.js | 28 +- examples/express.js | 38 +- examples/pack-tar.js | 28 +- examples/pack-tgz.js | 32 +- examples/pack-zip.js | 28 +- examples/progress.js | 84 +- index.js | 117 +-- lib/core.js | 1605 ++++++++++++++---------------- lib/error.js | 51 +- lib/plugins/json.js | 167 ++-- lib/plugins/tar.js | 257 ++--- lib/plugins/zip.js | 176 ++-- lib/utils.js | 66 ++ package-lock.json | 333 ++++--- package.json | 14 +- test/archiver.js | 650 ++++++------ test/helpers/index.js | 86 +- test/plugins.js | 343 ++++--- website/babel.config.js | 2 +- website/docs/archiver_api.md | 113 ++- website/docs/quickstart.md | 46 +- website/docusaurus.config.js | 78 +- website/src/pages/index.js | 39 +- website/src/pages/zipstream.md | 34 +- 31 files changed, 2209 insertions(+), 2305 deletions(-) create mode 100644 .prettierignore create mode 100644 lib/utils.js diff --git a/.github/workflows/nodejs.yml b/.github/workflows/nodejs.yml index d11701dd..7f171647 100644 --- a/.github/workflows/nodejs.yml +++ b/.github/workflows/nodejs.yml @@ -14,17 +14,17 @@ jobs: strategy: matrix: - node-version: [14.x, 16.x, 18.x, 20.x] + node-version: [18.x, 20.x] steps: - - uses: actions/checkout@v4.2.1 - - name: Use Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v4.0.4 - with: - node-version: ${{ matrix.node-version }} - - name: npm install and test - run: | - npm ci - npm test - env: - CI: true + - uses: actions/checkout@v4.2.1 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v4.0.4 + with: + node-version: ${{ matrix.node-version }} + - name: npm install and test + run: | + npm ci + npm test + env: + CI: true diff --git a/.github/workflows/npmpublish.yml b/.github/workflows/npmpublish.yml index 8f531f0f..2edfb950 100644 --- a/.github/workflows/npmpublish.yml +++ b/.github/workflows/npmpublish.yml @@ -11,7 +11,7 @@ jobs: - uses: actions/checkout@v4.2.1 - uses: actions/setup-node@v4.0.4 with: - node-version: 16 + node-version: 20 - run: npm ci - run: npm test @@ -22,7 +22,7 @@ jobs: - uses: actions/checkout@v4.2.1 - uses: actions/setup-node@v4.0.4 with: - node-version: 16 + node-version: 20 registry-url: https://registry.npmjs.org/ - run: npm ci - run: npm publish diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 00000000..1b8ac889 --- /dev/null +++ b/.prettierignore @@ -0,0 +1,3 @@ +# Ignore artifacts: +build +coverage diff --git a/CHANGELOG.md b/CHANGELOG.md index 2d4975de..915ca8a2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,7 @@ ## Changelog +**8.0.0** - _October 17, 2024_ — [Diff](https://github.com/archiverjs/node-archiver/compare/7.0.1...8.0.0) + **7.0.1** - _March 9, 2024_ — [Diff](https://github.com/archiverjs/node-archiver/compare/7.0.0...7.0.1) **7.0.0** - _February 28, 2024_ — [Diff](https://github.com/archiverjs/node-archiver/compare/6.0.2...7.0.0) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index eca68a45..ee85a307 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,13 +2,10 @@ #### Code Style Guide -* code should be indented with 2 spaces -* single quotes should be used where feasible -* commas should be followed by a single space (function params, etc) -* variable 
declaration should include `var`, [no multiple declarations](http://benalman.com/news/2012/05/multiple-var-statements-javascript/) +- code should be ran through `prettier` #### Tests -* tests should be added to the nodeunit configs in `tests/` -* tests can be run with `npm test` -* see existing tests for guidance \ No newline at end of file +- tests should be added in `test/` +- tests can be run with `npm test` +- see existing tests for guidance diff --git a/README.md b/README.md index 2ee7c171..b91ab342 100644 --- a/README.md +++ b/README.md @@ -13,33 +13,34 @@ npm install archiver --save ## Quick Start ```js -// require modules -const fs = require('fs'); -const archiver = require('archiver'); +import fs from "fs"; +import { ZipArchive } from "archiver"; // create a file to stream archive data to. -const output = fs.createWriteStream(__dirname + '/example.zip'); -const archive = archiver('zip', { - zlib: { level: 9 } // Sets the compression level. +const output = fs.createWriteStream(__dirname + "/example.zip"); +const archive = new ZipArchive({ + zlib: { level: 9 }, // Sets the compression level. }); // listen for all archive data to be written // 'close' event is fired only when a file descriptor is involved -output.on('close', function() { - console.log(archive.pointer() + ' total bytes'); - console.log('archiver has been finalized and the output file descriptor has closed.'); +output.on("close", function () { + console.log(archive.pointer() + " total bytes"); + console.log( + "archiver has been finalized and the output file descriptor has closed.", + ); }); // This event is fired when the data source is drained no matter what was the data source. // It is not part of this library but rather from the NodeJS Stream API. // @see: https://nodejs.org/api/stream.html#stream_event_end -output.on('end', function() { - console.log('Data has been drained'); +output.on("end", function () { + console.log("Data has been drained"); }); // good practice to catch warnings (ie stat failures and other non-blocking errors) -archive.on('warning', function(err) { - if (err.code === 'ENOENT') { +archive.on("warning", function (err) { + if (err.code === "ENOENT") { // log warning } else { // throw error @@ -48,7 +49,7 @@ archive.on('warning', function(err) { }); // good practice to catch this error explicitly -archive.on('error', function(err) { +archive.on("error", function (err) { throw err; }); @@ -56,27 +57,27 @@ archive.on('error', function(err) { archive.pipe(output); // append a file from stream -const file1 = __dirname + '/file1.txt'; -archive.append(fs.createReadStream(file1), { name: 'file1.txt' }); +const file1 = __dirname + "/file1.txt"; +archive.append(fs.createReadStream(file1), { name: "file1.txt" }); // append a file from string -archive.append('string cheese!', { name: 'file2.txt' }); +archive.append("string cheese!", { name: "file2.txt" }); // append a file from buffer -const buffer3 = Buffer.from('buff it!'); -archive.append(buffer3, { name: 'file3.txt' }); +const buffer3 = Buffer.from("buff it!"); +archive.append(buffer3, { name: "file3.txt" }); // append a file -archive.file('file1.txt', { name: 'file4.txt' }); +archive.file("file1.txt", { name: "file4.txt" }); // append files from a sub-directory and naming it `new-subdir` within the archive -archive.directory('subdir/', 'new-subdir'); +archive.directory("subdir/", "new-subdir"); // append files from a sub-directory, putting its contents at the root of archive -archive.directory('subdir/', false); +archive.directory("subdir/", false); 
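// (editor's sketch, not part of the original README example in this PR)
// The rewritten lib/core.js also exposes archive.symlink(filepath, target, mode)
// for programmatically adding a symlink entry; the link name below is hypothetical.
archive.symlink("file1-link.txt", "file1.txt", 420);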
// append files from a glob pattern -archive.glob('file*.txt', {cwd:__dirname}); +archive.glob("file*.txt", { cwd: __dirname }); // finalize the archive (ie we are done appending files but streams have to finish yet) // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand @@ -86,7 +87,3 @@ archive.finalize(); ## Formats Archiver ships with out of the box support for TAR and ZIP archives. - -You can register additional formats with `registerFormat`. - -You can check if format already exists before to register a new one with `isRegisteredFormat`. diff --git a/benchmark/common.js b/benchmark/common.js index 667c741c..a3809f34 100644 --- a/benchmark/common.js +++ b/benchmark/common.js @@ -2,10 +2,10 @@ function binaryBuffer(n) { var buffer = Buffer.alloc(n); for (var i = 0; i < n; i++) { - buffer.writeUInt8(i&255, i); + buffer.writeUInt8(i & 255, i); } return buffer; } -module.exports.binaryBuffer = binaryBuffer; \ No newline at end of file +module.exports.binaryBuffer = binaryBuffer; diff --git a/benchmark/simple/pack-zip.js b/benchmark/simple/pack-zip.js index a6c76705..38561bb7 100644 --- a/benchmark/simple/pack-zip.js +++ b/benchmark/simple/pack-zip.js @@ -1,10 +1,10 @@ -var fs = require('fs'); +var fs = require("fs"); -var mkdir = require('mkdirp'); -var streamBench = require('stream-bench'); +var mkdir = require("mkdirp"); +var streamBench = require("stream-bench"); -var archiver = require('../../'); -var common = require('../common'); +var archiver = require("../../"); +var common = require("../common"); var binaryBuffer = common.binaryBuffer; @@ -30,29 +30,27 @@ if (process.argv[2]) { } } -var archive = archiver('zip', { +var archive = archiver("zip", { zlib: { - level: level - } + level: level, + }, }); if (file === false) { - mkdir.sync('tmp'); + mkdir.sync("tmp"); - file = 'tmp/20mb.dat'; + file = "tmp/20mb.dat"; fs.writeFileSync(file, binaryBuffer(BITS_IN_MBYTE * 20)); } -console.log('zlib level: ' + level); +console.log("zlib level: " + level); var bench = streamBench({ logReport: true, interval: 500, - dump: true + dump: true, }); archive.pipe(bench); -archive - .file(file, { name: 'large file' }) - .finalize(); +archive.file(file, { name: "large file" }).finalize(); diff --git a/examples/express.js b/examples/express.js index 73531515..e5531317 100644 --- a/examples/express.js +++ b/examples/express.js @@ -1,40 +1,44 @@ -var app = require('express')(); -var archiver = require('archiver'); -var p = require('path'); +var app = require("express")(); +var archiver = require("archiver"); +var p = require("path"); -app.get('/', function(req, res) { +app.get("/", function (req, res) { + var archive = archiver("zip"); - var archive = archiver('zip'); - - archive.on('error', function(err) { - res.status(500).send({error: err.message}); + archive.on("error", function (err) { + res.status(500).send({ error: err.message }); }); //on stream closed we can end the request - archive.on('end', function() { - console.log('Archive wrote %d bytes', archive.pointer()); + archive.on("end", function () { + console.log("Archive wrote %d bytes", archive.pointer()); }); //set the archive name - res.attachment('archive-name.zip'); + res.attachment("archive-name.zip"); //this is the streaming magic archive.pipe(res); - var files = [__dirname + '/fixtures/file1.txt', __dirname + '/fixtures/file2.txt']; + var files = [ + __dirname + "/fixtures/file1.txt", + __dirname + "/fixtures/file2.txt", + ]; - for(var i in files) { + for (var i in files) { 
archive.file(files[i], { name: p.basename(files[i]) }); } - var directories = [__dirname + '/fixtures/somedir'] + var directories = [__dirname + "/fixtures/somedir"]; - for(var i in directories) { - archive.directory(directories[i], directories[i].replace(__dirname + '/fixtures', '')); + for (var i in directories) { + archive.directory( + directories[i], + directories[i].replace(__dirname + "/fixtures", ""), + ); } archive.finalize(); - }); app.listen(3000); diff --git a/examples/pack-tar.js b/examples/pack-tar.js index ac846482..1d6b4b30 100644 --- a/examples/pack-tar.js +++ b/examples/pack-tar.js @@ -1,25 +1,27 @@ -var fs = require('fs'); +var fs = require("fs"); -var archiver = require('archiver'); +var archiver = require("archiver"); -var output = fs.createWriteStream(__dirname + '/example-output.tar'); -var archive = archiver('tar'); +var output = fs.createWriteStream(__dirname + "/example-output.tar"); +var archive = archiver("tar"); -output.on('close', function() { - console.log(archive.pointer() + ' total bytes'); - console.log('archiver has been finalized and the output file descriptor has closed.'); +output.on("close", function () { + console.log(archive.pointer() + " total bytes"); + console.log( + "archiver has been finalized and the output file descriptor has closed.", + ); }); -archive.on('error', function(err) { +archive.on("error", function (err) { throw err; }); archive.pipe(output); -var file1 = __dirname + '/fixtures/file1.txt'; -var file2 = __dirname + '/fixtures/file2.txt'; +var file1 = __dirname + "/fixtures/file1.txt"; +var file2 = __dirname + "/fixtures/file2.txt"; archive - .append(fs.createReadStream(file1), { name: 'file1.txt' }) - .append(fs.createReadStream(file2), { name: 'file2.txt' }) - .finalize(); \ No newline at end of file + .append(fs.createReadStream(file1), { name: "file1.txt" }) + .append(fs.createReadStream(file2), { name: "file2.txt" }) + .finalize(); diff --git a/examples/pack-tgz.js b/examples/pack-tgz.js index c6c2dc2c..58f6aa45 100644 --- a/examples/pack-tgz.js +++ b/examples/pack-tgz.js @@ -1,29 +1,31 @@ -var fs = require('fs'); -var archiver = require('archiver'); +var fs = require("fs"); +var archiver = require("archiver"); -var output = fs.createWriteStream(__dirname + '/example-output.tar.gz'); -var archive = archiver('tar', { +var output = fs.createWriteStream(__dirname + "/example-output.tar.gz"); +var archive = archiver("tar", { gzip: true, gzipOptions: { - level: 1 - } + level: 1, + }, }); -output.on('close', function() { - console.log(archive.pointer() + ' total bytes'); - console.log('archiver has been finalized and the output file descriptor has closed.'); +output.on("close", function () { + console.log(archive.pointer() + " total bytes"); + console.log( + "archiver has been finalized and the output file descriptor has closed.", + ); }); -archive.on('error', function(err) { +archive.on("error", function (err) { throw err; }); archive.pipe(output); -var file1 = __dirname + '/fixtures/file1.txt'; -var file2 = __dirname + '/fixtures/file2.txt'; +var file1 = __dirname + "/fixtures/file1.txt"; +var file2 = __dirname + "/fixtures/file2.txt"; archive - .append(fs.createReadStream(file1), { name: 'file1.txt' }) - .append(fs.createReadStream(file2), { name: 'file2.txt' }) - .finalize(); \ No newline at end of file + .append(fs.createReadStream(file1), { name: "file1.txt" }) + .append(fs.createReadStream(file2), { name: "file2.txt" }) + .finalize(); diff --git a/examples/pack-zip.js b/examples/pack-zip.js index 07e85c13..e47c7b1c 100644 --- 
a/examples/pack-zip.js +++ b/examples/pack-zip.js @@ -1,25 +1,27 @@ -var fs = require('fs'); +var fs = require("fs"); -var archiver = require('archiver'); +var archiver = require("archiver"); -var output = fs.createWriteStream(__dirname + '/example-output.zip'); -var archive = archiver('zip'); +var output = fs.createWriteStream(__dirname + "/example-output.zip"); +var archive = archiver("zip"); -output.on('close', function() { - console.log(archive.pointer() + ' total bytes'); - console.log('archiver has been finalized and the output file descriptor has closed.'); +output.on("close", function () { + console.log(archive.pointer() + " total bytes"); + console.log( + "archiver has been finalized and the output file descriptor has closed.", + ); }); -archive.on('error', function(err) { +archive.on("error", function (err) { throw err; }); archive.pipe(output); -var file1 = __dirname + '/fixtures/file1.txt'; -var file2 = __dirname + '/fixtures/file2.txt'; +var file1 = __dirname + "/fixtures/file1.txt"; +var file2 = __dirname + "/fixtures/file2.txt"; archive - .append(fs.createReadStream(file1), { name: 'file1.txt' }) - .append(fs.createReadStream(file2), { name: 'file2.txt' }) - .finalize(); \ No newline at end of file + .append(fs.createReadStream(file1), { name: "file1.txt" }) + .append(fs.createReadStream(file2), { name: "file2.txt" }) + .finalize(); diff --git a/examples/progress.js b/examples/progress.js index a3b52940..3c6beb11 100644 --- a/examples/progress.js +++ b/examples/progress.js @@ -1,41 +1,46 @@ -var archiver = require('../'); -var tmp = require('os').tmpdir(); -var async = require('async'); -var fs = require('fs'); +var archiver = require("../"); +var tmp = require("os").tmpdir(); +var async = require("async"); +var fs = require("fs"); // You can change this by something bigger! -var directory = __dirname + '/fixtures'; -var destination = tmp + '/' + Date.now() + '.zip'; +var directory = __dirname + "/fixtures"; +var destination = tmp + "/" + Date.now() + ".zip"; var destinationStream = fs.createWriteStream(destination); -console.log('Zipping %s to %s', directory, destination); +console.log("Zipping %s to %s", directory, destination); // To find out the progression, we may prefer to first calculate the size of the zip's future content // For this, we need to recursivly `readDir` and get the size from a `stat` call on every file. 
// Note that Archiver is also computing the total size, but it's done asynchronously and may not be accurate -directorySize(directory, function(err, totalSize) { - var prettyTotalSize = bytesToSize(totalSize) - var archive = archiver('zip'); +directorySize(directory, function (err, totalSize) { + var prettyTotalSize = bytesToSize(totalSize); + var archive = archiver("zip"); - archive.on('error', function(err) { - console.error('Error while zipping', err); + archive.on("error", function (err) { + console.error("Error while zipping", err); }); - archive.on('progress', function(progress) { - var percent = progress.fs.processedBytes / totalSize * 100; + archive.on("progress", function (progress) { + var percent = (progress.fs.processedBytes / totalSize) * 100; - console.log('%s / %s (%d %)', bytesToSize(progress.fs.processedBytes), prettyTotalSize, percent); - }) + console.log( + "%s / %s (%d %)", + bytesToSize(progress.fs.processedBytes), + prettyTotalSize, + percent, + ); + }); //on stream closed we can end the request - archive.on('end', function() { - console.log('%s / %s (%d %)', prettyTotalSize, prettyTotalSize, 100); + archive.on("end", function () { + console.log("%s / %s (%d %)", prettyTotalSize, prettyTotalSize, 100); var archiveSize = archive.pointer(); - console.log('Archiver wrote %s bytes', bytesToSize(archiveSize)); - console.log('Compression ratio: %d:1', Math.round(totalSize / archiveSize)); - console.log('Space savings: %d %', (1 - (archiveSize / totalSize)) * 100); + console.log("Archiver wrote %s bytes", bytesToSize(archiveSize)); + console.log("Compression ratio: %d:1", Math.round(totalSize / archiveSize)); + console.log("Space savings: %d %", (1 - archiveSize / totalSize) * 100); }); archive.pipe(destinationStream); @@ -43,8 +48,7 @@ directorySize(directory, function(err, totalSize) { archive.directory(directory); archive.finalize(); -}) - +}); /** * You can use a nodejs module to do this, this function is really straightforward and will fail on error @@ -56,7 +60,7 @@ function directorySize(path, cb, size) { size = 0; } - fs.stat(path, function(err, stat) { + fs.stat(path, function (err, stat) { if (err) { cb(err); return; @@ -69,26 +73,34 @@ function directorySize(path, cb, size) { return; } - fs.readdir(path, function(err, paths) { + fs.readdir(path, function (err, paths) { if (err) { cb(err); return; } - async.map(paths.map(function(p) { return path + '/' + p }), directorySize, function(err, sizes) { - size += sizes.reduce(function(a, b) { return a + b }, 0); - cb(err, size); - }) - }) - }) + async.map( + paths.map(function (p) { + return path + "/" + p; + }), + directorySize, + function (err, sizes) { + size += sizes.reduce(function (a, b) { + return a + b; + }, 0); + cb(err, size); + }, + ); + }); + }); } /** * https://stackoverflow.com/questions/15900485/correct-way-to-convert-size-in-bytes-to-kb-mb-gb-in-javascript#18650828 */ function bytesToSize(bytes) { - var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB']; - if (bytes == 0) return '0 Byte'; - var i = parseInt(Math.floor(Math.log(bytes) / Math.log(1024))); - return Math.round(bytes / Math.pow(1024, i), 2) + ' ' + sizes[i]; -}; + var sizes = ["Bytes", "KB", "MB", "GB", "TB"]; + if (bytes == 0) return "0 Byte"; + var i = parseInt(Math.floor(Math.log(bytes) / Math.log(1024))); + return Math.round(bytes / Math.pow(1024, i), 2) + " " + sizes[i]; +} diff --git a/index.js b/index.js index 0996daef..97c449a8 100644 --- a/index.js +++ b/index.js @@ -1,84 +1,39 @@ -/** - * Archiver Vending - * - * @ignore - * @license 
[MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} - * @copyright (c) 2012-2014 Chris Talkington, contributors. - */ -var Archiver = require('./lib/core'); - -var formats = {}; - -/** - * Dispenses a new Archiver instance. - * - * @constructor - * @param {String} format The archive format to use. - * @param {Object} options See [Archiver]{@link Archiver} - * @return {Archiver} - */ -var vending = function(format, options) { - return vending.create(format, options); -}; - -/** - * Creates a new Archiver instance. - * - * @param {String} format The archive format to use. - * @param {Object} options See [Archiver]{@link Archiver} - * @return {Archiver} - */ -vending.create = function(format, options) { - if (formats[format]) { - var instance = new Archiver(format, options); - instance.setFormat(format); - instance.setModule(new formats[format](options)); - - return instance; - } else { - throw new Error('create(' + format + '): format not registered'); +import Archiver from "./lib/core.js"; +import Zip from "./lib/plugins/zip.js"; +import Tar from "./lib/plugins/tar.js"; +import Json from "./lib/plugins/json.js"; + +export { Archiver }; + +export class ZipArchive extends Archiver { + constructor(options) { + super(options); + this._format = "zip"; + this._module = new Zip(options); + this._supportsDirectory = true; + this._supportsSymlink = true; + this._modulePipe(); } -}; - -/** - * Registers a format for use with archiver. - * - * @param {String} format The name of the format. - * @param {Function} module The function for archiver to interact with. - * @return void - */ -vending.registerFormat = function(format, module) { - if (formats[format]) { - throw new Error('register(' + format + '): format already registered'); +} + +export class TarArchive extends Archiver { + constructor(options) { + super(options); + this._format = "tar"; + this._module = new Tar(options); + this._supportsDirectory = true; + this._supportsSymlink = true; + this._modulePipe(); } - - if (typeof module !== 'function') { - throw new Error('register(' + format + '): format module invalid'); +} + +export class JsonArchive extends Archiver { + constructor(options) { + super(options); + this._format = "json"; + this._module = new Json(options); + this._supportsDirectory = true; + this._supportsSymlink = true; + this._modulePipe(); } - - if (typeof module.prototype.append !== 'function' || typeof module.prototype.finalize !== 'function') { - throw new Error('register(' + format + '): format module missing methods'); - } - - formats[format] = module; -}; - -/** - * Check if the format is already registered. - * - * @param {String} format the name of the format. - * @return boolean - */ -vending.isRegisteredFormat = function (format) { - if (formats[format]) { - return true; - } - - return false; -}; - -vending.registerFormat('zip', require('./lib/plugins/zip')); -vending.registerFormat('tar', require('./lib/plugins/tar')); -vending.registerFormat('json', require('./lib/plugins/json')); - -module.exports = vending; \ No newline at end of file +} diff --git a/lib/core.js b/lib/core.js index 7c0a74d7..1bcf33af 100644 --- a/lib/core.js +++ b/lib/core.js @@ -1,921 +1,798 @@ -/** - * Archiver Core - * - * @ignore - * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} - * @copyright (c) 2012-2014 Chris Talkington, contributors. 
- */ -var fs = require('fs'); -var glob = require('readdir-glob'); -var async = require('async'); -var path = require('path'); -var util = require('archiver-utils'); - -var inherits = require('util').inherits; -var ArchiverError = require('./error'); -var Transform = require('readable-stream').Transform; - -var win32 = process.platform === 'win32'; - -/** - * @constructor - * @param {String} format The archive format to use. - * @param {(CoreOptions|TransformOptions)} options See also {@link ZipOptions} and {@link TarOptions}. - */ -var Archiver = function(format, options) { - if (!(this instanceof Archiver)) { - return new Archiver(format, options); - } - - if (typeof format !== 'string') { - options = format; - format = 'zip'; - } - - options = this.options = util.defaults(options, { - highWaterMark: 1024 * 1024, - statConcurrency: 4 - }); - - Transform.call(this, options); +import { createReadStream, lstat, readlinkSync, Stats } from "fs"; +import { isStream } from "is-stream"; +import readdirGlob from "readdir-glob"; +import { Readable } from "lazystream"; +import { queue } from "async"; +import { + dirname, + relative as relativePath, + resolve as resolvePath, +} from "path"; +import { ArchiverError } from "./error.js"; +import { Transform } from "readable-stream"; +import { + dateify, + normalizeInputSource, + sanitizePath, + trailingSlashIt, +} from "./utils.js"; +const { ReaddirGlob } = readdirGlob; +const win32 = process.platform === "win32"; + +export default class Archiver extends Transform { + _supportsDirectory = false; + _supportsSymlink = false; - this._format = false; - this._module = false; - this._pending = 0; - this._pointer = 0; - - this._entriesCount = 0; - this._entriesProcessedCount = 0; - this._fsEntriesTotalBytes = 0; - this._fsEntriesProcessedBytes = 0; - - this._queue = async.queue(this._onQueueTask.bind(this), 1); - this._queue.drain(this._onQueueDrain.bind(this)); - - this._statQueue = async.queue(this._onStatQueueTask.bind(this), options.statConcurrency); - this._statQueue.drain(this._onQueueDrain.bind(this)); - - this._state = { - aborted: false, - finalize: false, - finalizing: false, - finalized: false, - modulePiped: false - }; - - this._streams = []; -}; - -inherits(Archiver, Transform); - -/** - * Internal logic for `abort`. - * - * @private - * @return void - */ -Archiver.prototype._abort = function() { - this._state.aborted = true; - this._queue.kill(); - this._statQueue.kill(); - - if (this._queue.idle()) { - this._shutdown(); + /** + * @constructor + * @param {String} format The archive format to use. + * @param {(CoreOptions|TransformOptions)} options See also {@link ZipOptions} and {@link TarOptions}. + */ + constructor(options) { + options = { + highWaterMark: 1024 * 1024, + statConcurrency: 4, + ...options, + }; + super(options); + this.options = options; + this._format = false; + this._module = false; + this._pending = 0; + this._pointer = 0; + this._entriesCount = 0; + this._entriesProcessedCount = 0; + this._fsEntriesTotalBytes = 0; + this._fsEntriesProcessedBytes = 0; + this._queue = queue(this._onQueueTask.bind(this), 1); + this._queue.drain(this._onQueueDrain.bind(this)); + this._statQueue = queue( + this._onStatQueueTask.bind(this), + options.statConcurrency, + ); + this._statQueue.drain(this._onQueueDrain.bind(this)); + this._state = { + aborted: false, + finalize: false, + finalizing: false, + finalized: false, + modulePiped: false, + }; + this._streams = []; } -}; - -/** - * Internal helper for appending files. 
- * - * @private - * @param {String} filepath The source filepath. - * @param {EntryData} data The entry data. - * @return void - */ -Archiver.prototype._append = function(filepath, data) { - data = data || {}; - - var task = { - source: null, - filepath: filepath - }; - if (!data.name) { - data.name = filepath; + /** + * Internal logic for `abort`. + * + * @private + * @return void + */ + _abort() { + this._state.aborted = true; + this._queue.kill(); + this._statQueue.kill(); + if (this._queue.idle()) { + this._shutdown(); + } } - - data.sourcePath = filepath; - task.data = data; - this._entriesCount++; - - if (data.stats && data.stats instanceof fs.Stats) { - task = this._updateQueueTaskWithStats(task, data.stats); - if (task) { - if (data.stats.size) { - this._fsEntriesTotalBytes += data.stats.size; + /** + * Internal helper for appending files. + * + * @private + * @param {String} filepath The source filepath. + * @param {EntryData} data The entry data. + * @return void + */ + _append(filepath, data) { + data = data || {}; + let task = { + source: null, + filepath: filepath, + }; + if (!data.name) { + data.name = filepath; + } + data.sourcePath = filepath; + task.data = data; + this._entriesCount++; + if (data.stats && data.stats instanceof Stats) { + task = this._updateQueueTaskWithStats(task, data.stats); + if (task) { + if (data.stats.size) { + this._fsEntriesTotalBytes += data.stats.size; + } + this._queue.push(task); } - - this._queue.push(task); + } else { + this._statQueue.push(task); } - } else { - this._statQueue.push(task); } -}; - -/** - * Internal logic for `finalize`. - * - * @private - * @return void - */ -Archiver.prototype._finalize = function() { - if (this._state.finalizing || this._state.finalized || this._state.aborted) { - return; - } - - this._state.finalizing = true; - - this._moduleFinalize(); - - this._state.finalizing = false; - this._state.finalized = true; -}; - -/** - * Checks the various state variables to determine if we can `finalize`. - * - * @private - * @return {Boolean} - */ -Archiver.prototype._maybeFinalize = function() { - if (this._state.finalizing || this._state.finalized || this._state.aborted) { - return false; - } - - if (this._state.finalize && this._pending === 0 && this._queue.idle() && this._statQueue.idle()) { - this._finalize(); - return true; - } - - return false; -}; - -/** - * Appends an entry to the module. - * - * @private - * @fires Archiver#entry - * @param {(Buffer|Stream)} source - * @param {EntryData} data - * @param {Function} callback - * @return void - */ -Archiver.prototype._moduleAppend = function(source, data, callback) { - if (this._state.aborted) { - callback(); - return; - } - - this._module.append(source, data, function(err) { - this._task = null; - - if (this._state.aborted) { - this._shutdown(); + /** + * Internal logic for `finalize`. + * + * @private + * @return void + */ + _finalize() { + if ( + this._state.finalizing || + this._state.finalized || + this._state.aborted + ) { return; } - - if (err) { - this.emit('error', err); - setImmediate(callback); - return; + this._state.finalizing = true; + this._moduleFinalize(); + this._state.finalizing = false; + this._state.finalized = true; + } + /** + * Checks the various state variables to determine if we can `finalize`. 
+ * + * @private + * @return {Boolean} + */ + _maybeFinalize() { + if ( + this._state.finalizing || + this._state.finalized || + this._state.aborted + ) { + return false; } - - /** - * Fires when the entry's input has been processed and appended to the archive. - * - * @event Archiver#entry - * @type {EntryData} - */ - this.emit('entry', data); - this._entriesProcessedCount++; - - if (data.stats && data.stats.size) { - this._fsEntriesProcessedBytes += data.stats.size; + if ( + this._state.finalize && + this._pending === 0 && + this._queue.idle() && + this._statQueue.idle() + ) { + this._finalize(); + return true; } - - /** - * @event Archiver#progress - * @type {ProgressData} - */ - this.emit('progress', { - entries: { - total: this._entriesCount, - processed: this._entriesProcessedCount - }, - fs: { - totalBytes: this._fsEntriesTotalBytes, - processedBytes: this._fsEntriesProcessedBytes - } - }); - - setImmediate(callback); - }.bind(this)); -}; - -/** - * Finalizes the module. - * - * @private - * @return void - */ -Archiver.prototype._moduleFinalize = function() { - if (typeof this._module.finalize === 'function') { - this._module.finalize(); - } else if (typeof this._module.end === 'function') { - this._module.end(); - } else { - this.emit('error', new ArchiverError('NOENDMETHOD')); - } -}; - -/** - * Pipes the module to our internal stream with error bubbling. - * - * @private - * @return void - */ -Archiver.prototype._modulePipe = function() { - this._module.on('error', this._onModuleError.bind(this)); - this._module.pipe(this); - this._state.modulePiped = true; -}; - -/** - * Determines if the current module supports a defined feature. - * - * @private - * @param {String} key - * @return {Boolean} - */ -Archiver.prototype._moduleSupports = function(key) { - if (!this._module.supports || !this._module.supports[key]) { return false; } - - return this._module.supports[key]; -}; - -/** - * Unpipes the module from our internal stream. - * - * @private - * @return void - */ -Archiver.prototype._moduleUnpipe = function() { - this._module.unpipe(this); - this._state.modulePiped = false; -}; - -/** - * Normalizes entry data with fallbacks for key properties. - * - * @private - * @param {Object} data - * @param {fs.Stats} stats - * @return {Object} - */ -Archiver.prototype._normalizeEntryData = function(data, stats) { - data = util.defaults(data, { - type: 'file', - name: null, - date: null, - mode: null, - prefix: null, - sourcePath: null, - stats: false - }); - - if (stats && data.stats === false) { - data.stats = stats; - } - - var isDir = data.type === 'directory'; - - if (data.name) { - if (typeof data.prefix === 'string' && '' !== data.prefix) { - data.name = data.prefix + '/' + data.name; - data.prefix = null; - } - - data.name = util.sanitizePath(data.name); - - if (data.type !== 'symlink' && data.name.slice(-1) === '/') { - isDir = true; - data.type = 'directory'; - } else if (isDir) { - data.name += '/'; + /** + * Appends an entry to the module. 
+ * + * @private + * @fires Archiver#entry + * @param {(Buffer|Stream)} source + * @param {EntryData} data + * @param {Function} callback + * @return void + */ + _moduleAppend(source, data, callback) { + if (this._state.aborted) { + callback(); + return; } + this._module.append( + source, + data, + function (err) { + this._task = null; + if (this._state.aborted) { + this._shutdown(); + return; + } + if (err) { + this.emit("error", err); + setImmediate(callback); + return; + } + /** + * Fires when the entry's input has been processed and appended to the archive. + * + * @event Archiver#entry + * @type {EntryData} + */ + this.emit("entry", data); + this._entriesProcessedCount++; + if (data.stats && data.stats.size) { + this._fsEntriesProcessedBytes += data.stats.size; + } + /** + * @event Archiver#progress + * @type {ProgressData} + */ + this.emit("progress", { + entries: { + total: this._entriesCount, + processed: this._entriesProcessedCount, + }, + fs: { + totalBytes: this._fsEntriesTotalBytes, + processedBytes: this._fsEntriesProcessedBytes, + }, + }); + setImmediate(callback); + }.bind(this), + ); } - - // 511 === 0777; 493 === 0755; 438 === 0666; 420 === 0644 - if (typeof data.mode === 'number') { - if (win32) { - data.mode &= 511; - } else { - data.mode &= 4095 - } - } else if (data.stats && data.mode === null) { - if (win32) { - data.mode = data.stats.mode & 511; + /** + * Finalizes the module. + * + * @private + * @return void + */ + _moduleFinalize() { + if (typeof this._module.finalize === "function") { + this._module.finalize(); + } else if (typeof this._module.end === "function") { + this._module.end(); } else { - data.mode = data.stats.mode & 4095; - } - - // stat isn't reliable on windows; force 0755 for dir - if (win32 && isDir) { - data.mode = 493; + this.emit("error", new ArchiverError("NOENDMETHOD")); } - } else if (data.mode === null) { - data.mode = isDir ? 493 : 420; - } - - if (data.stats && data.date === null) { - data.date = data.stats.mtime; - } else { - data.date = util.dateify(data.date); } - - return data; -}; - -/** - * Error listener that re-emits error on to our internal stream. - * - * @private - * @param {Error} err - * @return void - */ -Archiver.prototype._onModuleError = function(err) { /** - * @event Archiver#error - * @type {ErrorData} + * Pipes the module to our internal stream with error bubbling. + * + * @private + * @return void */ - this.emit('error', err); -}; - -/** - * Checks the various state variables after queue has drained to determine if - * we need to `finalize`. - * - * @private - * @return void - */ -Archiver.prototype._onQueueDrain = function() { - if (this._state.finalizing || this._state.finalized || this._state.aborted) { - return; - } - - if (this._state.finalize && this._pending === 0 && this._queue.idle() && this._statQueue.idle()) { - this._finalize(); - } -}; - -/** - * Appends each queue task to the module. 
- * - * @private - * @param {Object} task - * @param {Function} callback - * @return void - */ -Archiver.prototype._onQueueTask = function(task, callback) { - var fullCallback = () => { - if(task.data.callback) { - task.data.callback(); - } - callback(); - } - - if (this._state.finalizing || this._state.finalized || this._state.aborted) { - fullCallback(); - return; + _modulePipe() { + this._module.on("error", this._onModuleError.bind(this)); + this._module.pipe(this); + this._state.modulePiped = true; } - - this._task = task; - this._moduleAppend(task.source, task.data, fullCallback); -}; - -/** - * Performs a file stat and reinjects the task back into the queue. - * - * @private - * @param {Object} task - * @param {Function} callback - * @return void - */ -Archiver.prototype._onStatQueueTask = function(task, callback) { - if (this._state.finalizing || this._state.finalized || this._state.aborted) { - callback(); - return; + /** + * Unpipes the module from our internal stream. + * + * @private + * @return void + */ + _moduleUnpipe() { + this._module.unpipe(this); + this._state.modulePiped = false; } - - fs.lstat(task.filepath, function(err, stats) { - if (this._state.aborted) { - setImmediate(callback); - return; + /** + * Normalizes entry data with fallbacks for key properties. + * + * @private + * @param {Object} data + * @param {fs.Stats} stats + * @return {Object} + */ + _normalizeEntryData(data, stats) { + data = { + type: "file", + name: null, + date: null, + mode: null, + prefix: null, + sourcePath: null, + stats: false, + ...data, + }; + if (stats && data.stats === false) { + data.stats = stats; } - - if (err) { - this._entriesCount--; - - /** - * @event Archiver#warning - * @type {ErrorData} - */ - this.emit('warning', err); - setImmediate(callback); - return; + let isDir = data.type === "directory"; + if (data.name) { + if (typeof data.prefix === "string" && "" !== data.prefix) { + data.name = data.prefix + "/" + data.name; + data.prefix = null; + } + data.name = sanitizePath(data.name); + if (data.type !== "symlink" && data.name.slice(-1) === "/") { + isDir = true; + data.type = "directory"; + } else if (isDir) { + data.name += "/"; + } } - - task = this._updateQueueTaskWithStats(task, stats); - - if (task) { - if (stats.size) { - this._fsEntriesTotalBytes += stats.size; + // 511 === 0777; 493 === 0755; 438 === 0666; 420 === 0644 + if (typeof data.mode === "number") { + if (win32) { + data.mode &= 511; + } else { + data.mode &= 4095; } - - this._queue.push(task); + } else if (data.stats && data.mode === null) { + if (win32) { + data.mode = data.stats.mode & 511; + } else { + data.mode = data.stats.mode & 4095; + } + // stat isn't reliable on windows; force 0755 for dir + if (win32 && isDir) { + data.mode = 493; + } + } else if (data.mode === null) { + data.mode = isDir ? 493 : 420; } - - setImmediate(callback); - }.bind(this)); -}; - -/** - * Unpipes the module and ends our internal stream. - * - * @private - * @return void - */ -Archiver.prototype._shutdown = function() { - this._moduleUnpipe(); - this.end(); -}; - -/** - * Tracks the bytes emitted by our internal stream. - * - * @private - * @param {Buffer} chunk - * @param {String} encoding - * @param {Function} callback - * @return void - */ -Archiver.prototype._transform = function(chunk, encoding, callback) { - if (chunk) { - this._pointer += chunk.length; - } - - callback(null, chunk); -}; - -/** - * Updates and normalizes a queue task using stats data. 
- * - * @private - * @param {Object} task - * @param {fs.Stats} stats - * @return {Object} - */ -Archiver.prototype._updateQueueTaskWithStats = function(task, stats) { - if (stats.isFile()) { - task.data.type = 'file'; - task.data.sourceType = 'stream'; - task.source = util.lazyReadStream(task.filepath); - } else if (stats.isDirectory() && this._moduleSupports('directory')) { - task.data.name = util.trailingSlashIt(task.data.name); - task.data.type = 'directory'; - task.data.sourcePath = util.trailingSlashIt(task.filepath); - task.data.sourceType = 'buffer'; - task.source = Buffer.concat([]); - } else if (stats.isSymbolicLink() && this._moduleSupports('symlink')) { - var linkPath = fs.readlinkSync(task.filepath); - var dirName = path.dirname(task.filepath); - task.data.type = 'symlink'; - task.data.linkname = path.relative(dirName, path.resolve(dirName, linkPath)); - task.data.sourceType = 'buffer'; - task.source = Buffer.concat([]); - } else { - if (stats.isDirectory()) { - this.emit('warning', new ArchiverError('DIRECTORYNOTSUPPORTED', task.data)); - } else if (stats.isSymbolicLink()) { - this.emit('warning', new ArchiverError('SYMLINKNOTSUPPORTED', task.data)); + if (data.stats && data.date === null) { + data.date = data.stats.mtime; } else { - this.emit('warning', new ArchiverError('ENTRYNOTSUPPORTED', task.data)); + data.date = dateify(data.date); } - - return null; - } - - task.data = this._normalizeEntryData(task.data, stats); - - return task; -}; - -/** - * Aborts the archiving process, taking a best-effort approach, by: - * - * - removing any pending queue tasks - * - allowing any active queue workers to finish - * - detaching internal module pipes - * - ending both sides of the Transform stream - * - * It will NOT drain any remaining sources. - * - * @return {this} - */ -Archiver.prototype.abort = function() { - if (this._state.aborted || this._state.finalized) { - return this; - } - - this._abort(); - - return this; -}; - -/** - * Appends an input source (text string, buffer, or stream) to the instance. - * - * When the instance has received, processed, and emitted the input, the `entry` - * event is fired. - * - * @fires Archiver#entry - * @param {(Buffer|Stream|String)} source The input source. - * @param {EntryData} data See also {@link ZipEntryData} and {@link TarEntryData}. - * @return {this} - */ -Archiver.prototype.append = function(source, data) { - if (this._state.finalize || this._state.aborted) { - this.emit('error', new ArchiverError('QUEUECLOSED')); - return this; - } - - data = this._normalizeEntryData(data); - - if (typeof data.name !== 'string' || data.name.length === 0) { - this.emit('error', new ArchiverError('ENTRYNAMEREQUIRED')); - return this; - } - - if (data.type === 'directory' && !this._moduleSupports('directory')) { - this.emit('error', new ArchiverError('DIRECTORYNOTSUPPORTED', { name: data.name })); - return this; - } - - source = util.normalizeInputSource(source); - - if (Buffer.isBuffer(source)) { - data.sourceType = 'buffer'; - } else if (util.isStream(source)) { - data.sourceType = 'stream'; - } else { - this.emit('error', new ArchiverError('INPUTSTEAMBUFFERREQUIRED', { name: data.name })); - return this; - } - - this._entriesCount++; - this._queue.push({ - data: data, - source: source - }); - - return this; -}; - -/** - * Appends a directory and its files, recursively, given its dirpath. - * - * @param {String} dirpath The source directory path. - * @param {String} destpath The destination path within the archive. 
- * @param {(EntryData|Function)} data See also [ZipEntryData]{@link ZipEntryData} and - * [TarEntryData]{@link TarEntryData}. - * @return {this} - */ -Archiver.prototype.directory = function(dirpath, destpath, data) { - if (this._state.finalize || this._state.aborted) { - this.emit('error', new ArchiverError('QUEUECLOSED')); - return this; - } - - if (typeof dirpath !== 'string' || dirpath.length === 0) { - this.emit('error', new ArchiverError('DIRECTORYDIRPATHREQUIRED')); - return this; - } - - this._pending++; - - if (destpath === false) { - destpath = ''; - } else if (typeof destpath !== 'string'){ - destpath = dirpath; - } - - var dataFunction = false; - if (typeof data === 'function') { - dataFunction = data; - data = {}; - } else if (typeof data !== 'object') { - data = {}; + return data; } - - var globOptions = { - stat: true, - dot: true - }; - - function onGlobEnd() { - this._pending--; - this._maybeFinalize(); + /** + * Error listener that re-emits error on to our internal stream. + * + * @private + * @param {Error} err + * @return void + */ + _onModuleError(err) { + /** + * @event Archiver#error + * @type {ErrorData} + */ + this.emit("error", err); } - - function onGlobError(err) { - this.emit('error', err); + /** + * Checks the various state variables after queue has drained to determine if + * we need to `finalize`. + * + * @private + * @return void + */ + _onQueueDrain() { + if ( + this._state.finalizing || + this._state.finalized || + this._state.aborted + ) { + return; + } + if ( + this._state.finalize && + this._pending === 0 && + this._queue.idle() && + this._statQueue.idle() + ) { + this._finalize(); + } } - - function onGlobMatch(match){ - globber.pause(); - - var ignoreMatch = false; - var entryData = Object.assign({}, data); - entryData.name = match.relative; - entryData.prefix = destpath; - entryData.stats = match.stat; - entryData.callback = globber.resume.bind(globber); - - try { - if (dataFunction) { - entryData = dataFunction(entryData); - - if (entryData === false) { - ignoreMatch = true; - } else if (typeof entryData !== 'object') { - throw new ArchiverError('DIRECTORYFUNCTIONINVALIDDATA', { dirpath: dirpath }); - } + /** + * Appends each queue task to the module. + * + * @private + * @param {Object} task + * @param {Function} callback + * @return void + */ + _onQueueTask(task, callback) { + const fullCallback = () => { + if (task.data.callback) { + task.data.callback(); } - } catch(e) { - this.emit('error', e); + callback(); + }; + if ( + this._state.finalizing || + this._state.finalized || + this._state.aborted + ) { + fullCallback(); return; } - - if (ignoreMatch) { - globber.resume(); + this._task = task; + this._moduleAppend(task.source, task.data, fullCallback); + } + /** + * Performs a file stat and reinjects the task back into the queue. + * + * @private + * @param {Object} task + * @param {Function} callback + * @return void + */ + _onStatQueueTask(task, callback) { + if ( + this._state.finalizing || + this._state.finalized || + this._state.aborted + ) { + callback(); return; } - - this._append(match.absolute, entryData); - } - - var globber = glob(dirpath, globOptions); - globber.on('error', onGlobError.bind(this)); - globber.on('match', onGlobMatch.bind(this)); - globber.on('end', onGlobEnd.bind(this)); - - return this; -}; - -/** - * Appends a file given its filepath using a - * [lazystream]{@link https://github.com/jpommerening/node-lazystream} wrapper to - * prevent issues with open file limits. 
- * - * When the instance has received, processed, and emitted the file, the `entry` - * event is fired. - * - * @param {String} filepath The source filepath. - * @param {EntryData} data See also [ZipEntryData]{@link ZipEntryData} and - * [TarEntryData]{@link TarEntryData}. - * @return {this} - */ -Archiver.prototype.file = function(filepath, data) { - if (this._state.finalize || this._state.aborted) { - this.emit('error', new ArchiverError('QUEUECLOSED')); - return this; - } - - if (typeof filepath !== 'string' || filepath.length === 0) { - this.emit('error', new ArchiverError('FILEFILEPATHREQUIRED')); - return this; - } - - this._append(filepath, data); - - return this; -}; - -/** - * Appends multiple files that match a glob pattern. - * - * @param {String} pattern The [glob pattern]{@link https://github.com/isaacs/minimatch} to match. - * @param {Object} options See [node-readdir-glob]{@link https://github.com/yqnn/node-readdir-glob#options}. - * @param {EntryData} data See also [ZipEntryData]{@link ZipEntryData} and - * [TarEntryData]{@link TarEntryData}. - * @return {this} - */ -Archiver.prototype.glob = function(pattern, options, data) { - this._pending++; - - options = util.defaults(options, { - stat: true, - pattern: pattern - }); - - function onGlobEnd() { - this._pending--; - this._maybeFinalize(); - } - - function onGlobError(err) { - this.emit('error', err); - } - - function onGlobMatch(match){ - globber.pause(); - var entryData = Object.assign({}, data); - entryData.callback = globber.resume.bind(globber); - entryData.stats = match.stat; - entryData.name = match.relative; - - this._append(match.absolute, entryData); - } - - var globber = glob(options.cwd || '.', options); - globber.on('error', onGlobError.bind(this)); - globber.on('match', onGlobMatch.bind(this)); - globber.on('end', onGlobEnd.bind(this)); - - return this; -}; - -/** - * Finalizes the instance and prevents further appending to the archive - * structure (queue will continue til drained). - * - * The `end`, `close` or `finish` events on the destination stream may fire - * right after calling this method so you should set listeners beforehand to - * properly detect stream completion. - * - * @return {Promise} - */ -Archiver.prototype.finalize = function() { - if (this._state.aborted) { - var abortedError = new ArchiverError('ABORTED'); - this.emit('error', abortedError); - return Promise.reject(abortedError); + lstat( + task.filepath, + function (err, stats) { + if (this._state.aborted) { + setImmediate(callback); + return; + } + if (err) { + this._entriesCount--; + /** + * @event Archiver#warning + * @type {ErrorData} + */ + this.emit("warning", err); + setImmediate(callback); + return; + } + task = this._updateQueueTaskWithStats(task, stats); + if (task) { + if (stats.size) { + this._fsEntriesTotalBytes += stats.size; + } + this._queue.push(task); + } + setImmediate(callback); + }.bind(this), + ); } - - if (this._state.finalize) { - var finalizingError = new ArchiverError('FINALIZING'); - this.emit('error', finalizingError); - return Promise.reject(finalizingError); + /** + * Unpipes the module and ends our internal stream. + * + * @private + * @return void + */ + _shutdown() { + this._moduleUnpipe(); + this.end(); } - - this._state.finalize = true; - - if (this._pending === 0 && this._queue.idle() && this._statQueue.idle()) { - this._finalize(); + /** + * Tracks the bytes emitted by our internal stream. 
+ * + * @private + * @param {Buffer} chunk + * @param {String} encoding + * @param {Function} callback + * @return void + */ + _transform(chunk, encoding, callback) { + if (chunk) { + this._pointer += chunk.length; + } + callback(null, chunk); } - - var self = this; - - return new Promise(function(resolve, reject) { - var errored; - - self._module.on('end', function() { - if (!errored) { - resolve(); + /** + * Updates and normalizes a queue task using stats data. + * + * @private + * @param {Object} task + * @param {Stats} stats + * @return {Object} + */ + _updateQueueTaskWithStats(task, stats) { + if (stats.isFile()) { + task.data.type = "file"; + task.data.sourceType = "stream"; + task.source = new Readable(function () { + return createReadStream(task.filepath); + }); + } else if (stats.isDirectory() && this._supportsDirectory) { + task.data.name = trailingSlashIt(task.data.name); + task.data.type = "directory"; + task.data.sourcePath = trailingSlashIt(task.filepath); + task.data.sourceType = "buffer"; + task.source = Buffer.concat([]); + } else if (stats.isSymbolicLink() && this._supportsSymlink) { + const linkPath = readlinkSync(task.filepath); + const dirName = dirname(task.filepath); + task.data.type = "symlink"; + task.data.linkname = relativePath( + dirName, + resolvePath(dirName, linkPath), + ); + task.data.sourceType = "buffer"; + task.source = Buffer.concat([]); + } else { + if (stats.isDirectory()) { + this.emit( + "warning", + new ArchiverError("DIRECTORYNOTSUPPORTED", task.data), + ); + } else if (stats.isSymbolicLink()) { + this.emit( + "warning", + new ArchiverError("SYMLINKNOTSUPPORTED", task.data), + ); + } else { + this.emit("warning", new ArchiverError("ENTRYNOTSUPPORTED", task.data)); } - }) - - self._module.on('error', function(err) { - errored = true; - reject(err); - }) - }) -}; - -/** - * Sets the module format name used for archiving. - * - * @param {String} format The name of the format. - * @return {this} - */ -Archiver.prototype.setFormat = function(format) { - if (this._format) { - this.emit('error', new ArchiverError('FORMATSET')); - return this; + return null; + } + task.data = this._normalizeEntryData(task.data, stats); + return task; } - - this._format = format; - - return this; -}; - -/** - * Sets the module used for archiving. - * - * @param {Function} module The function for archiver to interact with. - * @return {this} - */ -Archiver.prototype.setModule = function(module) { - if (this._state.aborted) { - this.emit('error', new ArchiverError('ABORTED')); + /** + * Aborts the archiving process, taking a best-effort approach, by: + * + * - removing any pending queue tasks + * - allowing any active queue workers to finish + * - detaching internal module pipes + * - ending both sides of the Transform stream + * + * It will NOT drain any remaining sources. + * + * @return {this} + */ + abort() { + if (this._state.aborted || this._state.finalized) { + return this; + } + this._abort(); return this; } - - if (this._state.module) { - this.emit('error', new ArchiverError('MODULESET')); + /** + * Appends an input source (text string, buffer, or stream) to the instance. + * + * When the instance has received, processed, and emitted the input, the `entry` + * event is fired. + * + * @fires Archiver#entry + * @param {(Buffer|Stream|String)} source The input source. + * @param {EntryData} data See also {@link ZipEntryData} and {@link TarEntryData}. 
+ * @return {this} + */ + append(source, data) { + if (this._state.finalize || this._state.aborted) { + this.emit("error", new ArchiverError("QUEUECLOSED")); + return this; + } + data = this._normalizeEntryData(data); + if (typeof data.name !== "string" || data.name.length === 0) { + this.emit("error", new ArchiverError("ENTRYNAMEREQUIRED")); + return this; + } + if (data.type === "directory" && !this._supportsDirectory) { + this.emit( + "error", + new ArchiverError("DIRECTORYNOTSUPPORTED", { name: data.name }), + ); + return this; + } + source = normalizeInputSource(source); + if (Buffer.isBuffer(source)) { + data.sourceType = "buffer"; + } else if (isStream(source)) { + data.sourceType = "stream"; + } else { + this.emit( + "error", + new ArchiverError("INPUTSTEAMBUFFERREQUIRED", { name: data.name }), + ); + return this; + } + this._entriesCount++; + this._queue.push({ + data: data, + source: source, + }); return this; } - - this._module = module; - this._modulePipe(); - - return this; -}; - -/** - * Appends a symlink to the instance. - * - * This does NOT interact with filesystem and is used for programmatically creating symlinks. - * - * @param {String} filepath The symlink path (within archive). - * @param {String} target The target path (within archive). - * @param {Number} mode Sets the entry permissions. - * @return {this} - */ -Archiver.prototype.symlink = function(filepath, target, mode) { - if (this._state.finalize || this._state.aborted) { - this.emit('error', new ArchiverError('QUEUECLOSED')); + /** + * Appends a directory and its files, recursively, given its dirpath. + * + * @param {String} dirpath The source directory path. + * @param {String} destpath The destination path within the archive. + * @param {(EntryData|Function)} data See also [ZipEntryData]{@link ZipEntryData} and + * [TarEntryData]{@link TarEntryData}. 
+ * @return {this} + */ + directory(dirpath, destpath, data) { + if (this._state.finalize || this._state.aborted) { + this.emit("error", new ArchiverError("QUEUECLOSED")); + return this; + } + if (typeof dirpath !== "string" || dirpath.length === 0) { + this.emit("error", new ArchiverError("DIRECTORYDIRPATHREQUIRED")); + return this; + } + this._pending++; + if (destpath === false) { + destpath = ""; + } else if (typeof destpath !== "string") { + destpath = dirpath; + } + var dataFunction = false; + if (typeof data === "function") { + dataFunction = data; + data = {}; + } else if (typeof data !== "object") { + data = {}; + } + var globOptions = { + stat: true, + dot: true, + }; + function onGlobEnd() { + this._pending--; + this._maybeFinalize(); + } + function onGlobError(err) { + this.emit("error", err); + } + function onGlobMatch(match) { + globber.pause(); + let ignoreMatch = false; + let entryData = Object.assign({}, data); + entryData.name = match.relative; + entryData.prefix = destpath; + entryData.stats = match.stat; + entryData.callback = globber.resume.bind(globber); + try { + if (dataFunction) { + entryData = dataFunction(entryData); + if (entryData === false) { + ignoreMatch = true; + } else if (typeof entryData !== "object") { + throw new ArchiverError("DIRECTORYFUNCTIONINVALIDDATA", { + dirpath: dirpath, + }); + } + } + } catch (e) { + this.emit("error", e); + return; + } + if (ignoreMatch) { + globber.resume(); + return; + } + this._append(match.absolute, entryData); + } + const globber = readdirGlob(dirpath, globOptions); + globber.on("error", onGlobError.bind(this)); + globber.on("match", onGlobMatch.bind(this)); + globber.on("end", onGlobEnd.bind(this)); return this; } - - if (typeof filepath !== 'string' || filepath.length === 0) { - this.emit('error', new ArchiverError('SYMLINKFILEPATHREQUIRED')); + /** + * Appends a file given its filepath using a + * [lazystream]{@link https://github.com/jpommerening/node-lazystream} wrapper to + * prevent issues with open file limits. + * + * When the instance has received, processed, and emitted the file, the `entry` + * event is fired. + * + * @param {String} filepath The source filepath. + * @param {EntryData} data See also [ZipEntryData]{@link ZipEntryData} and + * [TarEntryData]{@link TarEntryData}. + * @return {this} + */ + file(filepath, data) { + if (this._state.finalize || this._state.aborted) { + this.emit("error", new ArchiverError("QUEUECLOSED")); + return this; + } + if (typeof filepath !== "string" || filepath.length === 0) { + this.emit("error", new ArchiverError("FILEFILEPATHREQUIRED")); + return this; + } + this._append(filepath, data); return this; } - - if (typeof target !== 'string' || target.length === 0) { - this.emit('error', new ArchiverError('SYMLINKTARGETREQUIRED', { filepath: filepath })); + /** + * Appends multiple files that match a glob pattern. + * + * @param {String} pattern The [glob pattern]{@link https://github.com/isaacs/minimatch} to match. + * @param {Object} options See [node-readdir-glob]{@link https://github.com/yqnn/node-readdir-glob#options}. + * @param {EntryData} data See also [ZipEntryData]{@link ZipEntryData} and + * [TarEntryData]{@link TarEntryData}. 
+ * @return {this} + */ + glob(pattern, options, data) { + this._pending++; + options = { + stat: true, + pattern: pattern, + ...options, + }; + function onGlobEnd() { + this._pending--; + this._maybeFinalize(); + } + function onGlobError(err) { + this.emit("error", err); + } + function onGlobMatch(match) { + globber.pause(); + const entryData = Object.assign({}, data); + entryData.callback = globber.resume.bind(globber); + entryData.stats = match.stat; + entryData.name = match.relative; + this._append(match.absolute, entryData); + } + const globber = new ReaddirGlob(options.cwd || ".", options); + globber.on("error", onGlobError.bind(this)); + globber.on("match", onGlobMatch.bind(this)); + globber.on("end", onGlobEnd.bind(this)); return this; } - - if (!this._moduleSupports('symlink')) { - this.emit('error', new ArchiverError('SYMLINKNOTSUPPORTED', { filepath: filepath })); + /** + * Finalizes the instance and prevents further appending to the archive + * structure (queue will continue til drained). + * + * The `end`, `close` or `finish` events on the destination stream may fire + * right after calling this method so you should set listeners beforehand to + * properly detect stream completion. + * + * @return {Promise} + */ + finalize() { + if (this._state.aborted) { + var abortedError = new ArchiverError("ABORTED"); + this.emit("error", abortedError); + return Promise.reject(abortedError); + } + if (this._state.finalize) { + var finalizingError = new ArchiverError("FINALIZING"); + this.emit("error", finalizingError); + return Promise.reject(finalizingError); + } + this._state.finalize = true; + if (this._pending === 0 && this._queue.idle() && this._statQueue.idle()) { + this._finalize(); + } + var self = this; + return new Promise(function (resolve, reject) { + var errored; + self._module.on("end", function () { + if (!errored) { + resolve(); + } + }); + self._module.on("error", function (err) { + errored = true; + reject(err); + }); + }); + } + /** + * Appends a symlink to the instance. + * + * This does NOT interact with filesystem and is used for programmatically creating symlinks. + * + * @param {String} filepath The symlink path (within archive). + * @param {String} target The target path (within archive). + * @param {Number} mode Sets the entry permissions. 
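Because finalize() now returns a promise tied to the format module's `end`/`error` events, glob-based archives can be awaited directly. A small sketch with placeholder paths:

```js
import fs from "fs";
import { ZipArchive } from "archiver";

const archive = new ZipArchive({});
archive.pipe(fs.createWriteStream("docs.zip"));

// Options are forwarded to readdir-glob; `cwd` selects the directory to scan.
archive.glob("**/*.md", { cwd: "docs" });

// Resolves once the module emits "end", rejects if it emits "error".
await archive.finalize();
```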
+ * @return {this} + */ + symlink(filepath, target, mode) { + if (this._state.finalize || this._state.aborted) { + this.emit("error", new ArchiverError("QUEUECLOSED")); + return this; + } + if (typeof filepath !== "string" || filepath.length === 0) { + this.emit("error", new ArchiverError("SYMLINKFILEPATHREQUIRED")); + return this; + } + if (typeof target !== "string" || target.length === 0) { + this.emit( + "error", + new ArchiverError("SYMLINKTARGETREQUIRED", { filepath: filepath }), + ); + return this; + } + if (!this._supportsSymlink) { + this.emit( + "error", + new ArchiverError("SYMLINKNOTSUPPORTED", { filepath: filepath }), + ); + return this; + } + var data = {}; + data.type = "symlink"; + data.name = filepath.replace(/\\/g, "/"); + data.linkname = target.replace(/\\/g, "/"); + data.sourceType = "buffer"; + if (typeof mode === "number") { + data.mode = mode; + } + this._entriesCount++; + this._queue.push({ + data: data, + source: Buffer.concat([]), + }); return this; } - - var data = {}; - data.type = 'symlink'; - data.name = filepath.replace(/\\/g, '/'); - data.linkname = target.replace(/\\/g, '/'); - data.sourceType = 'buffer'; - - if (typeof mode === "number") { - data.mode = mode; + /** + * Returns the current length (in bytes) that has been emitted. + * + * @return {Number} + */ + pointer() { + return this._pointer; } - - this._entriesCount++; - this._queue.push({ - data: data, - source: Buffer.concat([]) - }); - - return this; -}; - -/** - * Returns the current length (in bytes) that has been emitted. - * - * @return {Number} - */ -Archiver.prototype.pointer = function() { - return this._pointer; -}; - -/** - * Middleware-like helper that has yet to be fully implemented. - * - * @private - * @param {Function} plugin - * @return {this} - */ -Archiver.prototype.use = function(plugin) { - this._streams.push(plugin); - return this; -}; - -module.exports = Archiver; +} /** * @typedef {Object} CoreOptions diff --git a/lib/error.js b/lib/error.js index 6bcb0ae1..1bb99a7a 100644 --- a/lib/error.js +++ b/lib/error.js @@ -1,30 +1,28 @@ -/** - * Archiver Core - * - * @ignore - * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} - * @copyright (c) 2012-2014 Chris Talkington, contributors. 
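symlink() only records a link entry; nothing is read from disk. For formats whose module supports symlinks (otherwise SYMLINKNOTSUPPORTED is emitted), usage looks roughly like this, with pointer() reporting the bytes emitted so far:

```js
import fs from "fs";
import { ZipArchive } from "archiver";

const archive = new ZipArchive({});
archive.pipe(fs.createWriteStream("release.zip"));

archive.append("hello\n", { name: "data/v1/hello.txt" });
// Creates an in-archive link "data/current" pointing at "v1".
archive.symlink("data/current", "v1", 0o755);

await archive.finalize();
console.log(archive.pointer(), "bytes emitted");
```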
- */ - -var util = require('util'); +import util from "util"; const ERROR_CODES = { - 'ABORTED': 'archive was aborted', - 'DIRECTORYDIRPATHREQUIRED': 'diretory dirpath argument must be a non-empty string value', - 'DIRECTORYFUNCTIONINVALIDDATA': 'invalid data returned by directory custom data function', - 'ENTRYNAMEREQUIRED': 'entry name must be a non-empty string value', - 'FILEFILEPATHREQUIRED': 'file filepath argument must be a non-empty string value', - 'FINALIZING': 'archive already finalizing', - 'QUEUECLOSED': 'queue closed', - 'NOENDMETHOD': 'no suitable finalize/end method defined by module', - 'DIRECTORYNOTSUPPORTED': 'support for directory entries not defined by module', - 'FORMATSET': 'archive format already set', - 'INPUTSTEAMBUFFERREQUIRED': 'input source must be valid Stream or Buffer instance', - 'MODULESET': 'module already set', - 'SYMLINKNOTSUPPORTED': 'support for symlink entries not defined by module', - 'SYMLINKFILEPATHREQUIRED': 'symlink filepath argument must be a non-empty string value', - 'SYMLINKTARGETREQUIRED': 'symlink target argument must be a non-empty string value', - 'ENTRYNOTSUPPORTED': 'entry not supported' + ABORTED: "archive was aborted", + DIRECTORYDIRPATHREQUIRED: + "diretory dirpath argument must be a non-empty string value", + DIRECTORYFUNCTIONINVALIDDATA: + "invalid data returned by directory custom data function", + ENTRYNAMEREQUIRED: "entry name must be a non-empty string value", + FILEFILEPATHREQUIRED: + "file filepath argument must be a non-empty string value", + FINALIZING: "archive already finalizing", + QUEUECLOSED: "queue closed", + NOENDMETHOD: "no suitable finalize/end method defined by module", + DIRECTORYNOTSUPPORTED: "support for directory entries not defined by module", + FORMATSET: "archive format already set", + INPUTSTEAMBUFFERREQUIRED: + "input source must be valid Stream or Buffer instance", + MODULESET: "module already set", + SYMLINKNOTSUPPORTED: "support for symlink entries not defined by module", + SYMLINKFILEPATHREQUIRED: + "symlink filepath argument must be a non-empty string value", + SYMLINKTARGETREQUIRED: + "symlink target argument must be a non-empty string value", + ENTRYNOTSUPPORTED: "entry not supported", }; function ArchiverError(code, data) { @@ -34,7 +32,6 @@ function ArchiverError(code, data) { this.code = code; this.data = data; } - util.inherits(ArchiverError, Error); -exports = module.exports = ArchiverError; \ No newline at end of file +export { ArchiverError }; diff --git a/lib/plugins/json.js b/lib/plugins/json.js index caf63de9..b377c90e 100644 --- a/lib/plugins/json.js +++ b/lib/plugins/json.js @@ -1,3 +1,7 @@ +import { Transform } from "readable-stream"; +import crc32 from "buffer-crc32"; +import { collectStream } from "../utils.js"; + /** * JSON Format Plugin * @@ -5,106 +9,71 @@ * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} * @copyright (c) 2012-2014 Chris Talkington, contributors. 
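The JSON plugin below emits a JSON listing of entries (name, size, crc32, and so on) rather than a binary archive, which is what the test suite leans on via JsonArchive. A rough sketch of reading that listing back:

```js
import { JsonArchive } from "archiver";

const archive = new JsonArchive({});

let out = "";
archive.on("data", (chunk) => (out += chunk));
archive.on("end", () => {
  // One object per entry, as collected by the plugin's append() below.
  console.log(JSON.parse(out));
});

archive.append("hello world", { name: "hello.txt" });
await archive.finalize();
```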
*/ -var inherits = require('util').inherits; -var Transform = require('readable-stream').Transform; - -var crc32 = require('buffer-crc32'); -var util = require('archiver-utils'); - -/** - * @constructor - * @param {(JsonOptions|TransformOptions)} options - */ -var Json = function(options) { - if (!(this instanceof Json)) { - return new Json(options); +export default class Json extends Transform { + /** + * @constructor + * @param {(JsonOptions|TransformOptions)} options + */ + constructor(options) { + super({ ...options }); + this.files = []; } - - options = this.options = util.defaults(options, {}); - - Transform.call(this, options); - - this.supports = { - directory: true, - symlink: true - }; - - this.files = []; -}; - -inherits(Json, Transform); - -/** - * [_transform description] - * - * @private - * @param {Buffer} chunk - * @param {String} encoding - * @param {Function} callback - * @return void - */ -Json.prototype._transform = function(chunk, encoding, callback) { - callback(null, chunk); -}; - -/** - * [_writeStringified description] - * - * @private - * @return void - */ -Json.prototype._writeStringified = function() { - var fileString = JSON.stringify(this.files); - this.write(fileString); -}; - -/** - * [append description] - * - * @param {(Buffer|Stream)} source - * @param {EntryData} data - * @param {Function} callback - * @return void - */ -Json.prototype.append = function(source, data, callback) { - var self = this; - - data.crc32 = 0; - - function onend(err, sourceBuffer) { - if (err) { - callback(err); - return; + /** + * [_transform description] + * + * @private + * @param {Buffer} chunk + * @param {String} encoding + * @param {Function} callback + * @return void + */ + _transform(chunk, encoding, callback) { + callback(null, chunk); + } + /** + * [_writeStringified description] + * + * @private + * @return void + */ + _writeStringified() { + var fileString = JSON.stringify(this.files); + this.write(fileString); + } + /** + * [append description] + * + * @param {(Buffer|Stream)} source + * @param {EntryData} data + * @param {Function} callback + * @return void + */ + append(source, data, callback) { + var self = this; + data.crc32 = 0; + function onend(err, sourceBuffer) { + if (err) { + callback(err); + return; + } + data.size = sourceBuffer.length || 0; + data.crc32 = crc32.unsigned(sourceBuffer); + self.files.push(data); + callback(null, data); + } + if (data.sourceType === "buffer") { + onend(null, source); + } else if (data.sourceType === "stream") { + collectStream(source, onend); } - - data.size = sourceBuffer.length || 0; - data.crc32 = crc32.unsigned(sourceBuffer); - - self.files.push(data); - - callback(null, data); } - - if (data.sourceType === 'buffer') { - onend(null, source); - } else if (data.sourceType === 'stream') { - util.collectStream(source, onend); + /** + * [finalize description] + * + * @return void + */ + finalize() { + this._writeStringified(); + this.end(); } -}; - -/** - * [finalize description] - * - * @return void - */ -Json.prototype.finalize = function() { - this._writeStringified(); - this.end(); -}; - -module.exports = Json; - -/** - * @typedef {Object} JsonOptions - * @global - */ +} diff --git a/lib/plugins/tar.js b/lib/plugins/tar.js index 3a170090..1a4e1d04 100644 --- a/lib/plugins/tar.js +++ b/lib/plugins/tar.js @@ -1,3 +1,7 @@ +import zlib from "zlib"; +import engine from "tar-stream"; +import { collectStream } from "../utils.js"; + /** * TAR Format Plugin * @@ -5,163 +9,110 @@ * @license [MIT]{@link 
https://github.com/archiverjs/node-archiver/blob/master/LICENSE} * @copyright (c) 2012-2014 Chris Talkington, contributors. */ -var zlib = require('zlib'); - -var engine = require('tar-stream'); -var util = require('archiver-utils'); - -/** - * @constructor - * @param {TarOptions} options - */ -var Tar = function(options) { - if (!(this instanceof Tar)) { - return new Tar(options); - } - - options = this.options = util.defaults(options, { - gzip: false - }); - - if (typeof options.gzipOptions !== 'object') { - options.gzipOptions = {}; +export default class Tar { + /** + * @constructor + * @param {TarOptions} options + */ + constructor(options) { + options = this.options = { + gzip: false, + ...options, + }; + if (typeof options.gzipOptions !== "object") { + options.gzipOptions = {}; + } + this.engine = engine.pack(options); + this.compressor = false; + if (options.gzip) { + this.compressor = zlib.createGzip(options.gzipOptions); + this.compressor.on("error", this._onCompressorError.bind(this)); + } } - - this.supports = { - directory: true, - symlink: true - }; - - this.engine = engine.pack(options); - this.compressor = false; - - if (options.gzip) { - this.compressor = zlib.createGzip(options.gzipOptions); - this.compressor.on('error', this._onCompressorError.bind(this)); + /** + * [_onCompressorError description] + * + * @private + * @param {Error} err + * @return void + */ + _onCompressorError(err) { + this.engine.emit("error", err); } -}; - -/** - * [_onCompressorError description] - * - * @private - * @param {Error} err - * @return void - */ -Tar.prototype._onCompressorError = function(err) { - this.engine.emit('error', err); -}; - -/** - * [append description] - * - * @param {(Buffer|Stream)} source - * @param {TarEntryData} data - * @param {Function} callback - * @return void - */ -Tar.prototype.append = function(source, data, callback) { - var self = this; - - data.mtime = data.date; - - function append(err, sourceBuffer) { - if (err) { - callback(err); - return; + /** + * [append description] + * + * @param {(Buffer|Stream)} source + * @param {TarEntryData} data + * @param {Function} callback + * @return void + */ + append(source, data, callback) { + var self = this; + data.mtime = data.date; + function append(err, sourceBuffer) { + if (err) { + callback(err); + return; + } + self.engine.entry(data, sourceBuffer, function (err) { + callback(err, data); + }); + } + if (data.sourceType === "buffer") { + append(null, source); + } else if (data.sourceType === "stream" && data.stats) { + data.size = data.stats.size; + var entry = self.engine.entry(data, function (err) { + callback(err, data); + }); + source.pipe(entry); + } else if (data.sourceType === "stream") { + collectStream(source, append); } - - self.engine.entry(data, sourceBuffer, function(err) { - callback(err, data); - }); } - - if (data.sourceType === 'buffer') { - append(null, source); - } else if (data.sourceType === 'stream' && data.stats) { - data.size = data.stats.size; - - var entry = self.engine.entry(data, function(err) { - callback(err, data); - }); - - source.pipe(entry); - } else if (data.sourceType === 'stream') { - util.collectStream(source, append); + /** + * [finalize description] + * + * @return void + */ + finalize() { + this.engine.finalize(); } -}; - -/** - * [finalize description] - * - * @return void - */ -Tar.prototype.finalize = function() { - this.engine.finalize(); -}; - -/** - * [on description] - * - * @return this.engine - */ -Tar.prototype.on = function() { - return 
this.engine.on.apply(this.engine, arguments); -}; - -/** - * [pipe description] - * - * @param {String} destination - * @param {Object} options - * @return this.engine - */ -Tar.prototype.pipe = function(destination, options) { - if (this.compressor) { - return this.engine.pipe.apply(this.engine, [this.compressor]).pipe(destination, options); - } else { - return this.engine.pipe.apply(this.engine, arguments); + /** + * [on description] + * + * @return this.engine + */ + on() { + return this.engine.on.apply(this.engine, arguments); } -}; - -/** - * [unpipe description] - * - * @return this.engine - */ -Tar.prototype.unpipe = function() { - if (this.compressor) { - return this.compressor.unpipe.apply(this.compressor, arguments); - } else { - return this.engine.unpipe.apply(this.engine, arguments); + /** + * [pipe description] + * + * @param {String} destination + * @param {Object} options + * @return this.engine + */ + pipe(destination, options) { + if (this.compressor) { + return this.engine.pipe + .apply(this.engine, [this.compressor]) + .pipe(destination, options); + } else { + return this.engine.pipe.apply(this.engine, arguments); + } } -}; - -module.exports = Tar; - -/** - * @typedef {Object} TarOptions - * @global - * @property {Boolean} [gzip=false] Compress the tar archive using gzip. - * @property {Object} [gzipOptions] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options} - * to control compression. - * @property {*} [*] See [tar-stream]{@link https://github.com/mafintosh/tar-stream} documentation for additional properties. - */ - -/** - * @typedef {Object} TarEntryData - * @global - * @property {String} name Sets the entry name including internal path. - * @property {(String|Date)} [date=NOW()] Sets the entry date. - * @property {Number} [mode=D:0755/F:0644] Sets the entry permissions. - * @property {String} [prefix] Sets a path prefix for the entry name. Useful - * when working with methods like `directory` or `glob`. - * @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing - * for reduction of fs stat calls when stat data is already known. - */ - -/** - * TarStream Module - * @external TarStream - * @see {@link https://github.com/mafintosh/tar-stream} - */ + /** + * [unpipe description] + * + * @return this.engine + */ + unpipe() { + if (this.compressor) { + return this.compressor.unpipe.apply(this.compressor, arguments); + } else { + return this.engine.unpipe.apply(this.engine, arguments); + } + } +} diff --git a/lib/plugins/zip.js b/lib/plugins/zip.js index df6f0743..59bf5bb3 100644 --- a/lib/plugins/zip.js +++ b/lib/plugins/zip.js @@ -1,3 +1,5 @@ +import engine from "zip-stream"; + /** * ZIP Format Plugin * @@ -5,116 +7,66 @@ * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} * @copyright (c) 2012-2014 Chris Talkington, contributors. */ -var engine = require('zip-stream'); -var util = require('archiver-utils'); - -/** - * @constructor - * @param {ZipOptions} [options] - * @param {String} [options.comment] Sets the zip archive comment. - * @param {Boolean} [options.forceLocalTime=false] Forces the archive to contain local file times instead of UTC. - * @param {Boolean} [options.forceZip64=false] Forces the archive to contain ZIP64 headers. - * @param {Boolean} [options.namePrependSlash=false] Prepends a forward slash to archive file paths. - * @param {Boolean} [options.store=false] Sets the compression method to STORE. 
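For the TAR plugin above, `gzip: true` pipes the packed stream through zlib before it reaches the destination, so the output is a .tar.gz. The class name TarArchive here is an assumption (mirroring ZipArchive/JsonArchive; see index.js in this patch for the exact exports):

```js
import fs from "fs";
// Assumed export name; check index.js for the actual class list.
import { TarArchive } from "archiver";

// gzipOptions is handed straight to zlib.createGzip().
const archive = new TarArchive({ gzip: true, gzipOptions: { level: 6 } });
archive.pipe(fs.createWriteStream("backup.tar.gz"));

archive.directory("data/", "data");
await archive.finalize();
```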
- * @param {Object} [options.zlib] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options} - */ -var Zip = function(options) { - if (!(this instanceof Zip)) { - return new Zip(options); +export default class Zip { + /** + * @constructor + * @param {ZipOptions} [options] + * @param {String} [options.comment] Sets the zip archive comment. + * @param {Boolean} [options.forceLocalTime=false] Forces the archive to contain local file times instead of UTC. + * @param {Boolean} [options.forceZip64=false] Forces the archive to contain ZIP64 headers. + * @param {Boolean} [options.namePrependSlash=false] Prepends a forward slash to archive file paths. + * @param {Boolean} [options.store=false] Sets the compression method to STORE. + * @param {Object} [options.zlib] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options} + */ + constructor(options) { + options = this.options = { + comment: "", + forceUTC: false, + namePrependSlash: false, + store: false, + ...options, + }; + this.engine = new engine(options); } - - options = this.options = util.defaults(options, { - comment: '', - forceUTC: false, - namePrependSlash: false, - store: false - }); - - this.supports = { - directory: true, - symlink: true - }; - - this.engine = new engine(options); -}; - -/** - * @param {(Buffer|Stream)} source - * @param {ZipEntryData} data - * @param {String} data.name Sets the entry name including internal path. - * @param {(String|Date)} [data.date=NOW()] Sets the entry date. - * @param {Number} [data.mode=D:0755/F:0644] Sets the entry permissions. - * @param {String} [data.prefix] Sets a path prefix for the entry name. Useful - * when working with methods like `directory` or `glob`. - * @param {fs.Stats} [data.stats] Sets the fs stat data for this entry allowing - * for reduction of fs stat calls when stat data is already known. - * @param {Boolean} [data.store=ZipOptions.store] Sets the compression method to STORE. - * @param {Function} callback - * @return void - */ -Zip.prototype.append = function(source, data, callback) { - this.engine.entry(source, data, callback); -}; - -/** - * @return void - */ -Zip.prototype.finalize = function() { - this.engine.finalize(); -}; - -/** - * @return this.engine - */ -Zip.prototype.on = function() { - return this.engine.on.apply(this.engine, arguments); -}; - -/** - * @return this.engine - */ -Zip.prototype.pipe = function() { - return this.engine.pipe.apply(this.engine, arguments); -}; - -/** - * @return this.engine - */ -Zip.prototype.unpipe = function() { - return this.engine.unpipe.apply(this.engine, arguments); -}; - -module.exports = Zip; - -/** - * @typedef {Object} ZipOptions - * @global - * @property {String} [comment] Sets the zip archive comment. - * @property {Boolean} [forceLocalTime=false] Forces the archive to contain local file times instead of UTC. - * @property {Boolean} [forceZip64=false] Forces the archive to contain ZIP64 headers. - * @prpperty {Boolean} [namePrependSlash=false] Prepends a forward slash to archive file paths. - * @property {Boolean} [store=false] Sets the compression method to STORE. - * @property {Object} [zlib] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options} - * to control compression. - * @property {*} [*] See [zip-stream]{@link https://archiverjs.com/zip-stream/ZipStream.html} documentation for current list of properties. - */ - -/** - * @typedef {Object} ZipEntryData - * @global - * @property {String} name Sets the entry name including internal path. 
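ZipOptions apply archive-wide, while ZipEntryData can override them per entry; `store` in particular is useful for inputs that are already compressed. A short sketch with placeholder filenames:

```js
import fs from "fs";
import { ZipArchive } from "archiver";

const archive = new ZipArchive({ forceLocalTime: true });
archive.pipe(fs.createWriteStream("assets.zip"));

// Per-entry `store: true` skips deflate for this file only.
archive.file("video.mp4", { name: "media/video.mp4", store: true });
archive.file("notes.txt", { name: "notes.txt" });

await archive.finalize();
```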
- * @property {(String|Date)} [date=NOW()] Sets the entry date. - * @property {Number} [mode=D:0755/F:0644] Sets the entry permissions. - * @property {Boolean} [namePrependSlash=ZipOptions.namePrependSlash] Prepends a forward slash to archive file paths. - * @property {String} [prefix] Sets a path prefix for the entry name. Useful - * when working with methods like `directory` or `glob`. - * @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing - * for reduction of fs stat calls when stat data is already known. - * @property {Boolean} [store=ZipOptions.store] Sets the compression method to STORE. - */ - -/** - * ZipStream Module - * @external ZipStream - * @see {@link https://www.archiverjs.com/zip-stream/ZipStream.html} - */ + /** + * @param {(Buffer|Stream)} source + * @param {ZipEntryData} data + * @param {String} data.name Sets the entry name including internal path. + * @param {(String|Date)} [data.date=NOW()] Sets the entry date. + * @param {Number} [data.mode=D:0755/F:0644] Sets the entry permissions. + * @param {String} [data.prefix] Sets a path prefix for the entry name. Useful + * when working with methods like `directory` or `glob`. + * @param {fs.Stats} [data.stats] Sets the fs stat data for this entry allowing + * for reduction of fs stat calls when stat data is already known. + * @param {Boolean} [data.store=ZipOptions.store] Sets the compression method to STORE. + * @param {Function} callback + * @return void + */ + append(source, data, callback) { + this.engine.entry(source, data, callback); + } + /** + * @return void + */ + finalize() { + this.engine.finalize(); + } + /** + * @return this.engine + */ + on() { + return this.engine.on.apply(this.engine, arguments); + } + /** + * @return this.engine + */ + pipe() { + return this.engine.pipe.apply(this.engine, arguments); + } + /** + * @return this.engine + */ + unpipe() { + return this.engine.unpipe.apply(this.engine, arguments); + } +} diff --git a/lib/utils.js b/lib/utils.js new file mode 100644 index 00000000..b8e7e180 --- /dev/null +++ b/lib/utils.js @@ -0,0 +1,66 @@ +import normalizePath from "normalize-path"; +import { PassThrough } from "readable-stream"; +import { isStream } from "is-stream"; + +export function collectStream(source, callback) { + var collection = []; + var size = 0; + + source.on("error", callback); + + source.on("data", function (chunk) { + collection.push(chunk); + size += chunk.length; + }); + + source.on("end", function () { + var buf = Buffer.alloc(size); + var offset = 0; + + collection.forEach(function (data) { + data.copy(buf, offset); + offset += data.length; + }); + + callback(null, buf); + }); +} + +export function dateify(dateish) { + dateish = dateish || new Date(); + + if (dateish instanceof Date) { + dateish = dateish; + } else if (typeof dateish === "string") { + dateish = new Date(dateish); + } else { + dateish = new Date(); + } + + return dateish; +} + +export function normalizeInputSource(source) { + if (source === null) { + return Buffer.alloc(0); + } else if (typeof source === "string") { + return Buffer.from(source); + } else if (isStream(source)) { + // Always pipe through a PassThrough stream to guarantee pausing the stream if it's already flowing, + // since it will only be processed in a (distant) future iteration of the event loop, and will lose + // data if already flowing now. 
+ return source.pipe(new PassThrough()); + } + + return source; +} + +export function sanitizePath(filepath) { + return normalizePath(filepath, false) + .replace(/^\w+:/, "") + .replace(/^(\.\.\/|\/)+/, ""); +} + +export function trailingSlashIt(str) { + return str.slice(-1) !== "/" ? str + "/" : str; +} diff --git a/package-lock.json b/package-lock.json index ee089e24..35d46382 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,13 +9,15 @@ "version": "7.0.1", "license": "MIT", "dependencies": { - "archiver-utils": "^5.0.2", "async": "^3.2.4", "buffer-crc32": "^1.0.0", + "is-stream": "^3.0.0", + "lazystream": "^1.0.0", + "normalize-path": "^3.0.0", "readable-stream": "^4.0.0", - "readdir-glob": "^2.0.0", + "readdir-glob": "^1.1.3", "tar-stream": "^3.0.0", - "zip-stream": "^6.0.1" + "zip-stream": "^7.0.2" }, "devDependencies": { "archiver-jsdoc-theme": "1.1.3", @@ -23,13 +25,14 @@ "jsdoc": "4.0.3", "mkdirp": "3.0.1", "mocha": "10.7.3", + "prettier": "3.3.3", "rimraf": "5.0.10", "stream-bench": "0.1.2", "tar": "6.2.1", "yauzl": "3.1.3" }, "engines": { - "node": ">= 14" + "node": ">=18" } }, "node_modules/@babel/helper-string-parser": { @@ -83,6 +86,7 @@ "version": "8.0.2", "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, "dependencies": { "string-width": "^5.1.2", "string-width-cjs": "npm:string-width@^4.2.0", @@ -111,6 +115,7 @@ "version": "0.11.0", "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, "optional": true, "engines": { "node": ">=14" @@ -162,6 +167,7 @@ "version": "6.1.0", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "dev": true, "engines": { "node": ">=12" }, @@ -173,6 +179,7 @@ "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, "dependencies": { "color-convert": "^2.0.1" }, @@ -205,23 +212,6 @@ "lodash": "^4.17.19" } }, - "node_modules/archiver-utils": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-5.0.2.tgz", - "integrity": "sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA==", - "dependencies": { - "glob": "^10.0.0", - "graceful-fs": "^4.2.0", - "is-stream": "^2.0.1", - "lazystream": "^1.0.0", - "lodash": "^4.17.15", - "normalize-path": "^3.0.0", - "readable-stream": "^4.0.0" - }, - "engines": { - "node": ">= 14" - } - }, "node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -540,6 +530,7 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, "dependencies": { "color-name": "~1.1.4" }, @@ -550,21 +541,33 @@ "node_modules/color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, "node_modules/compress-commons": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-6.0.2.tgz", - "integrity": "sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-7.0.0.tgz", + "integrity": "sha512-8WWFRMWaa37dwjWCxDcmdx6sxfjQTAEQ6s96BWqX9WYC6Mgg95EvwPYS/7QGX3txkst7TD1jIL2HCY9AixLGfA==", "dependencies": { "crc-32": "^1.2.0", - "crc32-stream": "^6.0.0", - "is-stream": "^2.0.1", + "crc32-stream": "^7.0.1", + "is-stream": "^4.0.0", "normalize-path": "^3.0.0", "readable-stream": "^4.0.0" }, "engines": { - "node": ">= 14" + "node": ">=18" + } + }, + "node_modules/compress-commons/node_modules/is-stream": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-4.0.1.tgz", + "integrity": "sha512-Dnz92NInDqYckGEUJv689RbRiTSEHCQ7wOVeALbkOz999YpqT46yMRIGtSNl2iCL1waAZSx40+h59NV/EwzV/A==", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/core-util-is": { @@ -584,21 +587,22 @@ } }, "node_modules/crc32-stream": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-6.0.0.tgz", - "integrity": "sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-7.0.1.tgz", + "integrity": "sha512-IBWsY8xznyQrcHn8h4bC8/4ErNke5elzgG8GcqF4RFPw6aHkWWRc7Tgw6upjaTX/CT/yQgqYENkxYsTYN+hW2g==", "dependencies": { "crc-32": "^1.2.0", "readable-stream": "^4.0.0" }, "engines": { - "node": ">= 14" + "node": ">=18" } }, "node_modules/cross-spawn": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", @@ -661,12 +665,14 @@ "node_modules/eastasianwidth": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true }, "node_modules/emoji-regex": { "version": "9.2.2", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true }, "node_modules/entities": { "version": "4.5.0", @@ -760,6 +766,7 @@ "version": "3.3.0", "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz", "integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==", + "dev": true, "dependencies": { "cross-spawn": "^7.0.0", "signal-exit": "^4.0.1" @@ -837,6 +844,7 @@ "version": "10.4.5", "resolved": 
"https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "dev": true, "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", @@ -867,7 +875,8 @@ "node_modules/graceful-fs": { "version": "4.2.11", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true }, "node_modules/has-flag": { "version": "4.0.0", @@ -947,6 +956,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, "engines": { "node": ">=8" } @@ -982,11 +992,11 @@ } }, "node_modules/is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", "engines": { - "node": ">=8" + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -1012,12 +1022,14 @@ "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true }, "node_modules/jackspeak": { "version": "3.4.3", "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", + "dev": true, "dependencies": { "@isaacs/cliui": "^8.0.2" }, @@ -1164,7 +1176,8 @@ "node_modules/lodash": { "version": "4.17.21", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true }, "node_modules/log-symbols": { "version": "4.1.0", @@ -1194,7 +1207,8 @@ "node_modules/lru-cache": { "version": "10.4.3", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==" + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true }, "node_modules/markdown-it": { "version": "14.1.0", @@ -1245,6 +1259,7 @@ "version": "9.0.5", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, "dependencies": { "brace-expansion": "^2.0.1" }, @@ -1259,6 +1274,7 @@ "version": "7.1.2", "resolved": 
"https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, "engines": { "node": ">=16 || 14 >=14.17" } @@ -1438,7 +1454,8 @@ "node_modules/package-json-from-dist": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", - "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==" + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", + "dev": true }, "node_modules/path-exists": { "version": "4.0.0", @@ -1453,6 +1470,7 @@ "version": "3.1.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, "engines": { "node": ">=8" } @@ -1461,6 +1479,7 @@ "version": "1.11.1", "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dev": true, "dependencies": { "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" @@ -1499,6 +1518,21 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/prettier": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.3.tgz", + "integrity": "sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==", + "dev": true, + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, "node_modules/process": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", @@ -1551,14 +1585,22 @@ } }, "node_modules/readdir-glob": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-2.0.0.tgz", - "integrity": "sha512-Wlx1ZhthH6jlb7WgcvamQ/HRhJJhaC48565pH2LdYGZxh6B5rNeyYptuz6HrDjKn6Pb0a35jxd/u7ecRQ3GKrQ==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-1.1.3.tgz", + "integrity": "sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==", "dependencies": { - "minimatch": "^9.0.0" + "minimatch": "^5.1.0" + } + }, + "node_modules/readdir-glob/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dependencies": { + "brace-expansion": "^2.0.1" }, - "funding": { - "url": "https://github.com/sponsors/yqnn" + "engines": { + "node": ">=10" } }, "node_modules/readdirp": { @@ -1638,6 +1680,7 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, "dependencies": { "shebang-regex": "^3.0.0" }, @@ -1649,6 +1692,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, "engines": { "node": ">=8" } @@ -1657,6 +1701,7 @@ 
"version": "4.1.0", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, "engines": { "node": ">=14" }, @@ -1704,6 +1749,7 @@ "version": "5.1.2", "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", @@ -1721,6 +1767,7 @@ "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -1734,6 +1781,7 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, "engines": { "node": ">=8" } @@ -1741,12 +1789,14 @@ "node_modules/string-width-cjs/node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true }, "node_modules/string-width-cjs/node_modules/strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, "dependencies": { "ansi-regex": "^5.0.1" }, @@ -1758,6 +1808,7 @@ "version": "7.1.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, "dependencies": { "ansi-regex": "^6.0.1" }, @@ -1773,6 +1824,7 @@ "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, "dependencies": { "ansi-regex": "^5.0.1" }, @@ -1784,6 +1836,7 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, "engines": { "node": ">=8" } @@ -1922,6 +1975,7 @@ "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, "dependencies": { "isexe": "^2.0.0" }, @@ -1942,6 +1996,7 @@ "version": "8.1.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", @@ -1959,6 +2014,7 @@ "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", "integrity": 
"sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -1975,6 +2031,7 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, "engines": { "node": ">=8" } @@ -1982,12 +2039,14 @@ "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true }, "node_modules/wrap-ansi-cjs/node_modules/string-width": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -2001,6 +2060,7 @@ "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, "dependencies": { "ansi-regex": "^5.0.1" }, @@ -2012,6 +2072,7 @@ "version": "6.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, "engines": { "node": ">=12" }, @@ -2164,16 +2225,16 @@ } }, "node_modules/zip-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-6.0.1.tgz", - "integrity": "sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA==", + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-7.0.2.tgz", + "integrity": "sha512-g1TjcvzTXLWwDDyZSdC+w7tNdeNCq/qA8Amm8kxGBldyW2yxtSHHlYinxTRvlcaE4Tt3l1ZPsWSA+P9sn20MRw==", "dependencies": { - "archiver-utils": "^5.0.0", - "compress-commons": "^6.0.2", + "compress-commons": "^7.0.0", + "normalize-path": "^3.0.0", "readable-stream": "^4.0.0" }, "engines": { - "node": ">= 14" + "node": ">=18" } } }, @@ -2214,6 +2275,7 @@ "version": "8.0.2", "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, "requires": { "string-width": "^5.1.2", "string-width-cjs": "npm:string-width@^4.2.0", @@ -2236,6 +2298,7 @@ "version": "0.11.0", "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, "optional": true }, "@types/linkify-it": { @@ -2277,12 +2340,14 @@ "ansi-regex": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", - "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==" + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "dev": true }, "ansi-styles": { 
"version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, "requires": { "color-convert": "^2.0.1" } @@ -2306,20 +2371,6 @@ "lodash": "^4.17.19" } }, - "archiver-utils": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-5.0.2.tgz", - "integrity": "sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA==", - "requires": { - "glob": "^10.0.0", - "graceful-fs": "^4.2.0", - "is-stream": "^2.0.1", - "lazystream": "^1.0.0", - "lodash": "^4.17.15", - "normalize-path": "^3.0.0", - "readable-stream": "^4.0.0" - } - }, "argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -2549,6 +2600,7 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, "requires": { "color-name": "~1.1.4" } @@ -2556,18 +2608,26 @@ "color-name": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true }, "compress-commons": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-6.0.2.tgz", - "integrity": "sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-7.0.0.tgz", + "integrity": "sha512-8WWFRMWaa37dwjWCxDcmdx6sxfjQTAEQ6s96BWqX9WYC6Mgg95EvwPYS/7QGX3txkst7TD1jIL2HCY9AixLGfA==", "requires": { "crc-32": "^1.2.0", - "crc32-stream": "^6.0.0", - "is-stream": "^2.0.1", + "crc32-stream": "^7.0.1", + "is-stream": "^4.0.0", "normalize-path": "^3.0.0", "readable-stream": "^4.0.0" + }, + "dependencies": { + "is-stream": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-4.0.1.tgz", + "integrity": "sha512-Dnz92NInDqYckGEUJv689RbRiTSEHCQ7wOVeALbkOz999YpqT46yMRIGtSNl2iCL1waAZSx40+h59NV/EwzV/A==" + } } }, "core-util-is": { @@ -2581,9 +2641,9 @@ "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==" }, "crc32-stream": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-6.0.0.tgz", - "integrity": "sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-7.0.1.tgz", + "integrity": "sha512-IBWsY8xznyQrcHn8h4bC8/4ErNke5elzgG8GcqF4RFPw6aHkWWRc7Tgw6upjaTX/CT/yQgqYENkxYsTYN+hW2g==", "requires": { "crc-32": "^1.2.0", "readable-stream": "^4.0.0" @@ -2593,6 +2653,7 @@ "version": "7.0.3", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, "requires": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", @@ -2632,12 +2693,14 @@ "eastasianwidth": { "version": "0.2.0", "resolved": 
"https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true }, "emoji-regex": { "version": "9.2.2", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true }, "entities": { "version": "4.5.0", @@ -2701,6 +2764,7 @@ "version": "3.3.0", "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz", "integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==", + "dev": true, "requires": { "cross-spawn": "^7.0.0", "signal-exit": "^4.0.1" @@ -2755,6 +2819,7 @@ "version": "10.4.5", "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "dev": true, "requires": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", @@ -2776,7 +2841,8 @@ "graceful-fs": { "version": "4.2.11", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true }, "has-flag": { "version": "4.0.0", @@ -2828,7 +2894,8 @@ "is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true }, "is-glob": { "version": "4.0.3", @@ -2852,9 +2919,9 @@ "dev": true }, "is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==" }, "is-unicode-supported": { "version": "0.1.0", @@ -2870,12 +2937,14 @@ "isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true }, "jackspeak": { "version": "3.4.3", "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", + "dev": true, "requires": { "@isaacs/cliui": "^8.0.2", "@pkgjs/parseargs": "^0.11.0" @@ -2997,7 +3066,8 @@ "lodash": { "version": "4.17.21", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": 
"sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true }, "log-symbols": { "version": "4.1.0", @@ -3021,7 +3091,8 @@ "lru-cache": { "version": "10.4.3", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==" + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true }, "markdown-it": { "version": "14.1.0", @@ -3060,6 +3131,7 @@ "version": "9.0.5", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, "requires": { "brace-expansion": "^2.0.1" } @@ -3067,7 +3139,8 @@ "minipass": { "version": "7.1.2", "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==" + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true }, "minizlib": { "version": "2.1.2", @@ -3195,7 +3268,8 @@ "package-json-from-dist": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", - "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==" + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", + "dev": true }, "path-exists": { "version": "4.0.0", @@ -3206,12 +3280,14 @@ "path-key": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true }, "path-scurry": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dev": true, "requires": { "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" @@ -3235,6 +3311,12 @@ "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "dev": true }, + "prettier": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.3.tgz", + "integrity": "sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==", + "dev": true + }, "process": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", @@ -3278,11 +3360,21 @@ } }, "readdir-glob": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-2.0.0.tgz", - "integrity": "sha512-Wlx1ZhthH6jlb7WgcvamQ/HRhJJhaC48565pH2LdYGZxh6B5rNeyYptuz6HrDjKn6Pb0a35jxd/u7ecRQ3GKrQ==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-1.1.3.tgz", + "integrity": "sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==", "requires": { - "minimatch": "^9.0.0" + 
"minimatch": "^5.1.0" + }, + "dependencies": { + "minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "requires": { + "brace-expansion": "^2.0.1" + } + } } }, "readdirp": { @@ -3336,6 +3428,7 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, "requires": { "shebang-regex": "^3.0.0" } @@ -3343,12 +3436,14 @@ "shebang-regex": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true }, "signal-exit": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==" + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true }, "stream-bench": { "version": "0.1.2", @@ -3390,6 +3485,7 @@ "version": "5.1.2", "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, "requires": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", @@ -3400,6 +3496,7 @@ "version": "npm:string-width@4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -3409,17 +3506,20 @@ "ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true }, "emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true }, "strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, "requires": { "ansi-regex": "^5.0.1" } @@ -3430,6 +3530,7 @@ "version": "7.1.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, "requires": { "ansi-regex": "^6.0.1" } @@ -3438,6 +3539,7 @@ "version": "npm:strip-ansi@6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": 
"sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, "requires": { "ansi-regex": "^5.0.1" }, @@ -3445,7 +3547,8 @@ "ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true } } }, @@ -3552,6 +3655,7 @@ "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, "requires": { "isexe": "^2.0.0" } @@ -3566,6 +3670,7 @@ "version": "8.1.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, "requires": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", @@ -3575,7 +3680,8 @@ "ansi-styles": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==" + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true } } }, @@ -3583,6 +3689,7 @@ "version": "npm:wrap-ansi@7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, "requires": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -3592,17 +3699,20 @@ "ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true }, "emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true }, "string-width": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -3613,6 +3723,7 @@ "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, "requires": { "ansi-regex": "^5.0.1" } @@ -3735,12 +3846,12 @@ "dev": true }, "zip-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-6.0.1.tgz", - "integrity": "sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA==", + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-7.0.2.tgz", + "integrity": 
"sha512-g1TjcvzTXLWwDDyZSdC+w7tNdeNCq/qA8Amm8kxGBldyW2yxtSHHlYinxTRvlcaE4Tt3l1ZPsWSA+P9sn20MRw==", "requires": { - "archiver-utils": "^5.0.0", - "compress-commons": "^6.0.2", + "compress-commons": "^7.0.0", + "normalize-path": "^3.0.0", "readable-stream": "^4.0.0" } } diff --git a/package.json b/package.json index c98d54a9..d6d084b2 100644 --- a/package.json +++ b/package.json @@ -15,26 +15,29 @@ "url": "https://github.com/archiverjs/node-archiver/issues" }, "license": "MIT", - "main": "index.js", + "type": "module", + "exports": "./index.js", "files": [ "index.js", "lib" ], "engines": { - "node": ">= 14" + "node": ">=18" }, "scripts": { "test": "mocha --reporter dot", "bench": "node benchmark/simple/pack-zip.js" }, "dependencies": { - "archiver-utils": "^5.0.2", "async": "^3.2.4", "buffer-crc32": "^1.0.0", + "is-stream": "^3.0.0", + "lazystream": "^1.0.0", + "normalize-path": "^3.0.0", "readable-stream": "^4.0.0", - "readdir-glob": "^2.0.0", + "readdir-glob": "^1.1.3", "tar-stream": "^3.0.0", - "zip-stream": "^6.0.1" + "zip-stream": "^7.0.2" }, "devDependencies": { "archiver-jsdoc-theme": "1.1.3", @@ -42,6 +45,7 @@ "jsdoc": "4.0.3", "mkdirp": "3.0.1", "mocha": "10.7.3", + "prettier": "3.3.3", "rimraf": "5.0.10", "stream-bench": "0.1.2", "tar": "6.2.1", diff --git a/test/archiver.js b/test/archiver.js index 21ec91fa..20de9ad1 100644 --- a/test/archiver.js +++ b/test/archiver.js @@ -1,450 +1,450 @@ -/*global before,describe,it */ -var fs = require('fs'); -var PassThrough = require('readable-stream').PassThrough; -var Readable = require('readable-stream').Readable; -var WriteStream = fs.createWriteStream; - -var assert = require('chai').assert; -var mkdir = require('mkdirp'); - -var helpers = require('./helpers'); -var HashStream = helpers.HashStream; -var UnBufferedStream = helpers.UnBufferedStream; -var WriteHashStream = helpers.WriteHashStream; -var binaryBuffer = helpers.binaryBuffer; - -var archiver = require('../'); +import { + WriteStream, + chmodSync, + createReadStream, + createWriteStream, + statSync, + symlinkSync, + unlinkSync, + writeFileSync, +} from "fs"; +import { PassThrough } from "readable-stream"; +import { Readable } from "readable-stream"; +import { assert } from "chai"; +import { mkdirp } from "mkdirp"; +import { + binaryBuffer, + readJSON, + UnBufferedStream, + WriteHashStream, +} from "./helpers/index.js"; +import { JsonArchive } from "../index.js"; var testBuffer = binaryBuffer(1024 * 16); +var testDate = new Date("Jan 03 2013 14:26:38 GMT"); +var testDate2 = new Date("Feb 10 2013 10:24:42 GMT"); +var win32 = process.platform === "win32"; -var testDate = new Date('Jan 03 2013 14:26:38 GMT'); -var testDate2 = new Date('Feb 10 2013 10:24:42 GMT'); - -var win32 = process.platform === 'win32'; - -describe('archiver', function() { - before(function() { - mkdir.sync('tmp'); - +describe("archiver", function () { + before(function () { + mkdirp.sync("tmp"); if (!win32) { - fs.chmodSync('test/fixtures/executable.sh', 0777); - fs.chmodSync('test/fixtures/directory/subdir/', 0755); - fs.symlinkSync('test/fixtures/directory/level0.txt', 'test/fixtures/directory/subdir/level0link.txt'); - fs.symlinkSync('test/fixtures/directory/subdir/subsub/', 'test/fixtures/directory/subdir/subsublink'); + chmodSync("test/fixtures/executable.sh", 511); // 0777 + chmodSync("test/fixtures/directory/subdir/", 493); // 0755 + symlinkSync( + "test/fixtures/directory/level0.txt", + "test/fixtures/directory/subdir/level0link.txt", + ); + symlinkSync( + "test/fixtures/directory/subdir/subsub/", + 
"test/fixtures/directory/subdir/subsublink", + ); } else { - fs.writeFileSync('test/fixtures/directory/subdir/level0link.txt', '../level0.txt'); - fs.writeFileSync('test/fixtures/directory/subdir/subsublink', 'subsub'); + writeFileSync( + "test/fixtures/directory/subdir/level0link.txt", + "../level0.txt", + ); + writeFileSync("test/fixtures/directory/subdir/subsublink", "subsub"); } }); - - after(function() { - fs.unlinkSync('test/fixtures/directory/subdir/level0link.txt'); - fs.unlinkSync('test/fixtures/directory/subdir/subsublink'); + after(function () { + unlinkSync("test/fixtures/directory/subdir/level0link.txt"); + unlinkSync("test/fixtures/directory/subdir/subsublink"); }); - - describe('core', function() { - var archive = archiver('json'); - - describe('#_normalizeEntryData', function() { - it('should support prefix of the entry name', function() { - var prefix1 = archive._normalizeEntryData({ name: 'entry.txt', prefix: 'prefix/' }); - assert.propertyVal(prefix1, 'name', 'prefix/entry.txt'); - - var prefix2 = archive._normalizeEntryData({ name: 'entry.txt', prefix: '' }); - assert.propertyVal(prefix2, 'name', 'entry.txt'); + describe("core", function () { + var archive = new JsonArchive(); + describe("#_normalizeEntryData", function () { + it("should support prefix of the entry name", function () { + var prefix1 = archive._normalizeEntryData({ + name: "entry.txt", + prefix: "prefix/", + }); + assert.propertyVal(prefix1, "name", "prefix/entry.txt"); + var prefix2 = archive._normalizeEntryData({ + name: "entry.txt", + prefix: "", + }); + assert.propertyVal(prefix2, "name", "entry.txt"); }); - - it('should support special bits on unix', function () { + it("should support special bits on unix", function () { if (!win32) { - var mode = archive._normalizeEntryData({ name: 'executable.sh', mode: fs.statSync('test/fixtures/executable.sh').mode }); - assert.propertyVal(mode, 'mode', 511); + var mode = archive._normalizeEntryData({ + name: "executable.sh", + mode: statSync("test/fixtures/executable.sh").mode, + }); + assert.propertyVal(mode, "mode", 511); } }); }); }); - - describe('api', function() { - describe('#abort', function() { - var archive; - - before(function(done) { - archive = archiver('json'); - var testStream = new WriteStream('tmp/abort.json'); - - testStream.on('close', function() { + describe("api", function () { + describe("#abort", function () { + let archive; + before(function (done) { + archive = new JsonArchive(); + const testStream = new WriteStream("tmp/abort.json"); + testStream.on("close", function () { done(); }); - archive.pipe(testStream); - archive - .append(testBuffer, { name: 'buffer.txt', date: testDate }) - .append(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate }) - .file('test/fixtures/test.txt') + .append(testBuffer, { name: "buffer.txt", date: testDate }) + .append(createReadStream("test/fixtures/test.txt"), { + name: "stream.txt", + date: testDate, + }) + .file("test/fixtures/test.txt") .abort(); }); - - it('should have a state of aborted', function() { - assert.property(archive, '_state'); - assert.propertyVal(archive._state, 'aborted', true); + it("should have a state of aborted", function () { + assert.property(archive, "_state"); + assert.propertyVal(archive._state, "aborted", true); }); }); - - describe('#append', function() { + describe("#append", function () { var actual; var archive; var entries = {}; - - before(function(done) { - archive = archiver('json'); - var testStream = new 
WriteStream('tmp/append.json'); - - testStream.on('close', function() { - actual = helpers.readJSON('tmp/append.json'); - - actual.forEach(function(entry) { + before(function (done) { + archive = new JsonArchive(); + var testStream = new WriteStream("tmp/append.json"); + testStream.on("close", function () { + actual = readJSON("tmp/append.json"); + actual.forEach(function (entry) { entries[entry.name] = entry; }); - done(); }); - archive.pipe(testStream); - archive - .append(testBuffer, { name: 'buffer.txt', date: testDate }) - .append(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate }) - .append(Readable.from(['test']), { name: 'stream-like.txt', date: testDate }) - .append(null, { name: 'directory/', date: testDate }) + .append(testBuffer, { name: "buffer.txt", date: testDate }) + .append(createReadStream("test/fixtures/test.txt"), { + name: "stream.txt", + date: testDate, + }) + .append(Readable.from(["test"]), { + name: "stream-like.txt", + date: testDate, + }) + .append(null, { name: "directory/", date: testDate }) .finalize(); }); - - it('should append multiple entries', function() { + it("should append multiple entries", function () { assert.isArray(actual); assert.lengthOf(actual, 4); }); - - it('should append buffer', function() { - assert.property(entries, 'buffer.txt'); - assert.propertyVal(entries['buffer.txt'], 'name', 'buffer.txt'); - assert.propertyVal(entries['buffer.txt'], 'type', 'file'); - assert.propertyVal(entries['buffer.txt'], 'date', '2013-01-03T14:26:38.000Z'); - assert.propertyVal(entries['buffer.txt'], 'mode', 420); - assert.propertyVal(entries['buffer.txt'], 'crc32', 3893830384); - assert.propertyVal(entries['buffer.txt'], 'size', 16384); + it("should append buffer", function () { + assert.property(entries, "buffer.txt"); + assert.propertyVal(entries["buffer.txt"], "name", "buffer.txt"); + assert.propertyVal(entries["buffer.txt"], "type", "file"); + assert.propertyVal( + entries["buffer.txt"], + "date", + "2013-01-03T14:26:38.000Z", + ); + assert.propertyVal(entries["buffer.txt"], "mode", 420); + assert.propertyVal(entries["buffer.txt"], "crc32", 3893830384); + assert.propertyVal(entries["buffer.txt"], "size", 16384); }); - - it('should append stream', function() { - assert.property(entries, 'stream.txt'); - assert.propertyVal(entries['stream.txt'], 'name', 'stream.txt'); - assert.propertyVal(entries['stream.txt'], 'type', 'file'); - assert.propertyVal(entries['stream.txt'], 'date', '2013-01-03T14:26:38.000Z'); - assert.propertyVal(entries['stream.txt'], 'mode', 420); - assert.propertyVal(entries['stream.txt'], 'crc32', 585446183); - assert.propertyVal(entries['stream.txt'], 'size', 19); + it("should append stream", function () { + assert.property(entries, "stream.txt"); + assert.propertyVal(entries["stream.txt"], "name", "stream.txt"); + assert.propertyVal(entries["stream.txt"], "type", "file"); + assert.propertyVal( + entries["stream.txt"], + "date", + "2013-01-03T14:26:38.000Z", + ); + assert.propertyVal(entries["stream.txt"], "mode", 420); + assert.propertyVal(entries["stream.txt"], "crc32", 585446183); + assert.propertyVal(entries["stream.txt"], "size", 19); }); - - it('should append stream-like source', function() { - assert.property(entries, 'stream-like.txt'); - assert.propertyVal(entries['stream-like.txt'], 'name', 'stream-like.txt'); - assert.propertyVal(entries['stream-like.txt'], 'type', 'file'); - assert.propertyVal(entries['stream-like.txt'], 'date', '2013-01-03T14:26:38.000Z'); - 
assert.propertyVal(entries['stream-like.txt'], 'mode', 420); - assert.propertyVal(entries['stream-like.txt'], 'crc32', 3632233996); - assert.propertyVal(entries['stream-like.txt'], 'size', 4); + it("should append stream-like source", function () { + assert.property(entries, "stream-like.txt"); + assert.propertyVal( + entries["stream-like.txt"], + "name", + "stream-like.txt", + ); + assert.propertyVal(entries["stream-like.txt"], "type", "file"); + assert.propertyVal( + entries["stream-like.txt"], + "date", + "2013-01-03T14:26:38.000Z", + ); + assert.propertyVal(entries["stream-like.txt"], "mode", 420); + assert.propertyVal(entries["stream-like.txt"], "crc32", 3632233996); + assert.propertyVal(entries["stream-like.txt"], "size", 4); }); - - it('should append directory', function() { - assert.property(entries, 'directory/'); - assert.propertyVal(entries['directory/'], 'name', 'directory/'); - assert.propertyVal(entries['directory/'], 'type', 'directory'); - assert.propertyVal(entries['directory/'], 'date', '2013-01-03T14:26:38.000Z'); - assert.propertyVal(entries['directory/'], 'mode', 493); - assert.propertyVal(entries['directory/'], 'crc32', 0); - assert.propertyVal(entries['directory/'], 'size', 0); + it("should append directory", function () { + assert.property(entries, "directory/"); + assert.propertyVal(entries["directory/"], "name", "directory/"); + assert.propertyVal(entries["directory/"], "type", "directory"); + assert.propertyVal( + entries["directory/"], + "date", + "2013-01-03T14:26:38.000Z", + ); + assert.propertyVal(entries["directory/"], "mode", 493); + assert.propertyVal(entries["directory/"], "crc32", 0); + assert.propertyVal(entries["directory/"], "size", 0); }); }); - - describe('#directory', function() { + describe("#directory", function () { var actual; var archive; var entries = {}; - - before(function(done) { - archive = archiver('json'); - var testStream = new WriteStream('tmp/directory.json'); - - testStream.on('close', function() { - actual = helpers.readJSON('tmp/directory.json'); - - actual.forEach(function(entry) { + before(function (done) { + archive = new JsonArchive(); + var testStream = new WriteStream("tmp/directory.json"); + testStream.on("close", function () { + actual = readJSON("tmp/directory.json"); + actual.forEach(function (entry) { entries[entry.name] = entry; }); - done(); }); - archive.pipe(testStream); - archive - .directory('test/fixtures/directory', null, { date: testDate }) - .directory('test/fixtures/directory', 'Win\\DS\\', { date: testDate }) - .directory('test/fixtures/directory', 'directory', function(data) { - if (data.name === 'ignore.txt') { + .directory("test/fixtures/directory", null, { date: testDate }) + .directory("test/fixtures/directory", "Win\\DS\\", { date: testDate }) + .directory("test/fixtures/directory", "directory", function (data) { + if (data.name === "ignore.txt") { return false; } - data.funcProp = true; return data; }) .finalize(); }); - - it('should append multiple entries', function() { + it("should append multiple entries", function () { assert.isArray(actual); - - assert.property(entries, 'test/fixtures/directory/level0.txt'); - assert.property(entries, 'test/fixtures/directory/subdir/'); - assert.property(entries, 'test/fixtures/directory/subdir/level1.txt'); - assert.property(entries, 'test/fixtures/directory/subdir/subsub/'); - assert.property(entries, 'test/fixtures/directory/subdir/subsub/level2.txt'); - assert.propertyVal(entries['test/fixtures/directory/level0.txt'], 'date', '2013-01-03T14:26:38.000Z'); - 
assert.propertyVal(entries['test/fixtures/directory/subdir/'], 'date', '2013-01-03T14:26:38.000Z'); - - assert.property(entries, 'directory/level0.txt'); - assert.property(entries, 'directory/subdir/'); - assert.property(entries, 'directory/subdir/level1.txt'); - assert.property(entries, 'directory/subdir/subsub/'); - assert.property(entries, 'directory/subdir/subsub/level2.txt'); + assert.property(entries, "test/fixtures/directory/level0.txt"); + assert.property(entries, "test/fixtures/directory/subdir/"); + assert.property(entries, "test/fixtures/directory/subdir/level1.txt"); + assert.property(entries, "test/fixtures/directory/subdir/subsub/"); + assert.property( + entries, + "test/fixtures/directory/subdir/subsub/level2.txt", + ); + assert.propertyVal( + entries["test/fixtures/directory/level0.txt"], + "date", + "2013-01-03T14:26:38.000Z", + ); + assert.propertyVal( + entries["test/fixtures/directory/subdir/"], + "date", + "2013-01-03T14:26:38.000Z", + ); + assert.property(entries, "directory/level0.txt"); + assert.property(entries, "directory/subdir/"); + assert.property(entries, "directory/subdir/level1.txt"); + assert.property(entries, "directory/subdir/subsub/"); + assert.property(entries, "directory/subdir/subsub/level2.txt"); }); - - it('should support setting data properties via function', function() { - assert.property(entries, 'directory/level0.txt'); - assert.propertyVal(entries['directory/level0.txt'], 'funcProp', true); + it("should support setting data properties via function", function () { + assert.property(entries, "directory/level0.txt"); + assert.propertyVal(entries["directory/level0.txt"], "funcProp", true); }); - - it('should support ignoring matches via function', function() { - assert.notProperty(entries, 'directory/ignore.txt'); + it("should support ignoring matches via function", function () { + assert.notProperty(entries, "directory/ignore.txt"); }); - - it('should find dot files', function() { - assert.property(entries, 'directory/.dotfile'); + it("should find dot files", function () { + assert.property(entries, "directory/.dotfile"); }); - - it('should retain symlinks', function() { - assert.property(entries, 'test/fixtures/directory/subdir/level0link.txt'); - assert.property(entries, 'directory/subdir/level0link.txt'); + it("should retain symlinks", function () { + assert.property( + entries, + "test/fixtures/directory/subdir/level0link.txt", + ); + assert.property(entries, "directory/subdir/level0link.txt"); }); - - it('should retain directory symlink', function() { - assert.property(entries, 'test/fixtures/directory/subdir/subsublink'); - assert.property(entries, 'directory/subdir/subsublink'); + it("should retain directory symlink", function () { + assert.property(entries, "test/fixtures/directory/subdir/subsublink"); + assert.property(entries, "directory/subdir/subsublink"); }); - - it('should handle windows path separators in prefix', function() { - assert.property(entries, 'Win/DS/level0.txt'); + it("should handle windows path separators in prefix", function () { + assert.property(entries, "Win/DS/level0.txt"); }); }); - - describe('#file', function() { + describe("#file", function () { var actual; var archive; var entries = {}; - - before(function(done) { - archive = archiver('json'); - var testStream = new WriteStream('tmp/file.json'); - - testStream.on('close', function() { - actual = helpers.readJSON('tmp/file.json'); - - actual.forEach(function(entry) { + before(function (done) { + archive = new JsonArchive(); + var testStream = new 
WriteStream("tmp/file.json"); + testStream.on("close", function () { + actual = readJSON("tmp/file.json"); + actual.forEach(function (entry) { entries[entry.name] = entry; }); - done(); }); - archive.pipe(testStream); - archive - .file('test/fixtures/test.txt', { name: 'test.txt', date: testDate }) - .file('test/fixtures/test.txt') - .file('test/fixtures/executable.sh', { mode: win32 ? 0777 : null }) + .file("test/fixtures/test.txt", { name: "test.txt", date: testDate }) + .file("test/fixtures/test.txt") + .file("test/fixtures/executable.sh", { mode: win32 ? 511 : null }) // 0777 .finalize(); }); - - it('should append multiple entries', function() { + it("should append multiple entries", function () { assert.isArray(actual); assert.lengthOf(actual, 3); }); - - it('should append filepath', function() { - assert.property(entries, 'test.txt'); - assert.propertyVal(entries['test.txt'], 'name', 'test.txt'); - assert.propertyVal(entries['test.txt'], 'date', '2013-01-03T14:26:38.000Z'); - assert.propertyVal(entries['test.txt'], 'crc32', 585446183); - assert.propertyVal(entries['test.txt'], 'size', 19); + it("should append filepath", function () { + assert.property(entries, "test.txt"); + assert.propertyVal(entries["test.txt"], "name", "test.txt"); + assert.propertyVal( + entries["test.txt"], + "date", + "2013-01-03T14:26:38.000Z", + ); + assert.propertyVal(entries["test.txt"], "crc32", 585446183); + assert.propertyVal(entries["test.txt"], "size", 19); }); - - it('should fallback to filepath when no name is set', function() { - assert.property(entries, 'test/fixtures/test.txt'); + it("should fallback to filepath when no name is set", function () { + assert.property(entries, "test/fixtures/test.txt"); }); - - it('should fallback to file stats when applicable', function() { - assert.property(entries, 'test/fixtures/executable.sh'); - assert.propertyVal(entries['test/fixtures/executable.sh'], 'name', 'test/fixtures/executable.sh'); - assert.propertyVal(entries['test/fixtures/executable.sh'], 'mode', 511); - assert.propertyVal(entries['test/fixtures/executable.sh'], 'crc32', 3957348457); - assert.propertyVal(entries['test/fixtures/executable.sh'], 'size', 11); + it("should fallback to file stats when applicable", function () { + assert.property(entries, "test/fixtures/executable.sh"); + assert.propertyVal( + entries["test/fixtures/executable.sh"], + "name", + "test/fixtures/executable.sh", + ); + assert.propertyVal(entries["test/fixtures/executable.sh"], "mode", 511); + assert.propertyVal( + entries["test/fixtures/executable.sh"], + "crc32", + 3957348457, + ); + assert.propertyVal(entries["test/fixtures/executable.sh"], "size", 11); }); }); - - describe('#glob', function() { + describe("#glob", function () { var actual; var archive; var entries = {}; - - before(function(done) { - archive = archiver('json'); - var testStream = new WriteStream('tmp/glob.json'); - - testStream.on('close', function() { - actual = helpers.readJSON('tmp/glob.json'); - - actual.forEach(function(entry) { + before(function (done) { + archive = new JsonArchive(); + var testStream = new WriteStream("tmp/glob.json"); + testStream.on("close", function () { + actual = readJSON("tmp/glob.json"); + actual.forEach(function (entry) { entries[entry.name] = entry; }); - done(); }); - archive.pipe(testStream); - archive - .glob('test/fixtures/test.txt', null ) - .glob('test/fixtures/empty.txt', null ) - .glob('test/fixtures/executable.sh', null ) - .glob('test/fixtures/directory/**/*', { ignore: 'test/fixtures/directory/subdir/**/*', 
nodir: true }) - .glob('**/*', { cwd: 'test/fixtures/directory/subdir/' }) + .glob("test/fixtures/test.txt", null) + .glob("test/fixtures/empty.txt", null) + .glob("test/fixtures/executable.sh", null) + .glob("test/fixtures/directory/**/*", { + ignore: "test/fixtures/directory/subdir/**/*", + nodir: true, + }) + .glob("**/*", { cwd: "test/fixtures/directory/subdir/" }) .finalize(); }); - - it('should append multiple entries', function() { + it("should append multiple entries", function () { assert.isArray(actual); - - assert.property(entries, 'test/fixtures/test.txt'); - assert.property(entries, 'test/fixtures/executable.sh'); - assert.property(entries, 'test/fixtures/empty.txt'); - - assert.property(entries, 'test/fixtures/directory/level0.txt'); - - assert.property(entries, 'level1.txt'); - assert.property(entries, 'subsub/level2.txt'); + assert.property(entries, "test/fixtures/test.txt"); + assert.property(entries, "test/fixtures/executable.sh"); + assert.property(entries, "test/fixtures/empty.txt"); + assert.property(entries, "test/fixtures/directory/level0.txt"); + assert.property(entries, "level1.txt"); + assert.property(entries, "subsub/level2.txt"); }); }); - - describe('#promise', function() { + describe("#promise", function () { var archive; - - it('should use a promise', function(done) { - archive = archiver('json'); - var testStream = new WriteStream('tmp/promise.json'); - + it("should use a promise", function (done) { + archive = new JsonArchive(); + var testStream = new WriteStream("tmp/promise.json"); archive.pipe(testStream); - archive - .append(testBuffer, { name: 'buffer.txt', date: testDate }) - .append(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate }) - .append(null, { name: 'directory/', date: testDate }) - .finalize() - .then(function() { - done() + .append(testBuffer, { name: "buffer.txt", date: testDate }) + .append(createReadStream("test/fixtures/test.txt"), { + name: "stream.txt", + date: testDate, }) + .append(null, { name: "directory/", date: testDate }) + .finalize() + .then(function () { + done(); + }); }); }); - - describe('#errors', function() { - var archive; - - it('should allow continue on stat failing', function(done) { - archive = archiver('json'); - var testStream = new WriteStream('tmp/errors-stat.json'); - - testStream.on('close', function() { - done(); - }); - - archive.pipe(testStream); - - archive - .file('test/fixtures/test.txt') - .file('test/fixtures/test-missing.txt') - .file('test/fixtures/empty.txt') - .finalize() + describe("#errors", function () { + var archive; + it("should allow continue on stat failing", function (done) { + archive = new JsonArchive(); + var testStream = new WriteStream("tmp/errors-stat.json"); + testStream.on("close", function () { + done(); }); - - it('should allow continue on with several stat failings', function(done) { - archive = archiver('json'); - var testStream = new WriteStream('tmp/errors-stat.json'); - - testStream.on('close', function() { - done(); - }); - - archive.pipe(testStream); - - archive.file('test/fixtures/test.txt'); - for (var i = 1; i <= 20; i++) - archive.file('test/fixtures/test-missing.txt'); - - archive.finalize() - }); - }); - - describe('#isRegisteredFormat', function () { - var isRegisteredFormat = archiver.isRegisteredFormat('zip'); - it('should return true when the value is present', function () { - assert.equal(true, isRegisteredFormat); + archive.pipe(testStream); + archive + .file("test/fixtures/test.txt") + .file("test/fixtures/test-missing.txt") 
+ .file("test/fixtures/empty.txt") + .finalize(); + }); + it("should allow continue on with several stat failings", function (done) { + archive = new JsonArchive(); + var testStream = new WriteStream("tmp/errors-stat.json"); + testStream.on("close", function () { + done(); + }); + archive.pipe(testStream); + archive.file("test/fixtures/test.txt"); + for (var i = 1; i <= 20; i++) + archive.file("test/fixtures/test-missing.txt"); + archive.finalize(); }); }); - }); - - describe('#symlink', function() { + describe("#symlink", function () { var actual; var archive; var entries = {}; - - before(function(done) { - archive = archiver('json'); - var testStream = new WriteStream('tmp/symlink.json'); - - testStream.on('close', function() { - actual = helpers.readJSON('tmp/symlink.json'); - - actual.forEach(function(entry) { + before(function (done) { + archive = new JsonArchive(); + var testStream = new WriteStream("tmp/symlink.json"); + testStream.on("close", function () { + actual = readJSON("tmp/symlink.json"); + actual.forEach(function (entry) { entries[entry.name] = entry; }); - done(); }); - archive.pipe(testStream); - archive .append("file-a", { name: "file-a" }) .symlink("directory-a/symlink-to-file-a", "../file-a") - .symlink("directory-b/directory-c/symlink-to-directory-a", "../../directory-a", 493) + .symlink( + "directory-b/directory-c/symlink-to-directory-a", + "../../directory-a", + 493, + ) .finalize(); }); - - it('should append multiple entries', () => { + it("should append multiple entries", () => { assert.isArray(actual); - assert.property(entries, 'file-a'); - assert.property(entries, 'directory-a/symlink-to-file-a'); - assert.property(entries, 'directory-b/directory-c/symlink-to-directory-a'); - assert.propertyVal(entries['directory-b/directory-c/symlink-to-directory-a'], 'mode', 493); + assert.property(entries, "file-a"); + assert.property(entries, "directory-a/symlink-to-file-a"); + assert.property( + entries, + "directory-b/directory-c/symlink-to-directory-a", + ); + assert.propertyVal( + entries["directory-b/directory-c/symlink-to-directory-a"], + "mode", + 493, + ); }); }); }); diff --git a/test/helpers/index.js b/test/helpers/index.js index 05cb597a..d4a7ebe9 100644 --- a/test/helpers/index.js +++ b/test/helpers/index.js @@ -1,110 +1,78 @@ -var crypto = require('crypto'); -var fs = require('fs'); -var inherits = require('util').inherits; - -var Stream = require('stream').Stream; -var Readable = require('readable-stream').Readable; -var Writable = require('readable-stream').Writable; - -function adjustDateByOffset(d, offset) { - d = (d instanceof Date) ? d : new Date(); - +import crypto from "crypto"; +import { readFileSync, WriteStream } from "fs"; +import { inherits } from "util"; +import { Stream } from "stream"; +import { Readable, Writable } from "readable-stream"; + +export function adjustDateByOffset(d, offset) { + d = d instanceof Date ? 
d : new Date(); if (offset >= 1) { d.setMinutes(d.getMinutes() - offset); } else { d.setMinutes(d.getMinutes() + Math.abs(offset)); } - return d; } -module.exports.adjustDateByOffset = adjustDateByOffset; - -function binaryBuffer(n) { +export function binaryBuffer(n) { var buffer = Buffer.alloc(n); - for (var i = 0; i < n; i++) { - buffer.writeUInt8(i&255, i); + buffer.writeUInt8(i & 255, i); } - return buffer; } -module.exports.binaryBuffer = binaryBuffer; - function BinaryStream(size, options) { Readable.call(this, options); - var buf = Buffer.alloc(size); - for (var i = 0; i < size; i++) { - buf.writeUInt8(i&255, i); + buf.writeUInt8(i & 255, i); } - this.push(buf); this.push(null); } - inherits(BinaryStream, Readable); - -BinaryStream.prototype._read = function(size) {}; - -module.exports.BinaryStream = BinaryStream; - +BinaryStream.prototype._read = function (size) {}; function DeadEndStream(options) { Writable.call(this, options); } - inherits(DeadEndStream, Writable); - -DeadEndStream.prototype._write = function(chuck, encoding, callback) { +DeadEndStream.prototype._write = function (chuck, encoding, callback) { callback(); }; -module.exports.DeadEndStream = DeadEndStream; - -function readJSON(filepath) { +export function readJSON(filepath) { var contents; - try { - contents = fs.readFileSync(String(filepath)); + contents = readFileSync(String(filepath)); contents = JSON.parse(contents); - } catch(e) { + } catch (e) { contents = null; } - return contents; } -module.exports.readJSON = readJSON; - function UnBufferedStream() { this.readable = true; } - inherits(UnBufferedStream, Stream); - -module.exports.UnBufferedStream = UnBufferedStream; - function WriteHashStream(path, options) { - fs.WriteStream.call(this, path, options); - - this.hash = crypto.createHash('sha1'); + WriteStream.call(this, path, options); + this.hash = crypto.createHash("sha1"); this.digest = null; - - this.on('close', function() { - this.digest = this.hash.digest('hex'); + this.on("close", function () { + this.digest = this.hash.digest("hex"); }); } - -inherits(WriteHashStream, fs.WriteStream); - -WriteHashStream.prototype.write = function(chunk) { +inherits(WriteHashStream, WriteStream); +WriteHashStream.prototype.write = function (chunk) { if (chunk) { this.hash.update(chunk); } - - return fs.WriteStream.prototype.write.call(this, chunk); + return WriteStream.prototype.write.call(this, chunk); }; -module.exports.WriteHashStream = WriteHashStream; \ No newline at end of file +export { BinaryStream }; +export { DeadEndStream }; +export { UnBufferedStream }; +export { WriteHashStream }; diff --git a/test/plugins.js b/test/plugins.js index ba128a8c..494eb128 100644 --- a/test/plugins.js +++ b/test/plugins.js @@ -1,51 +1,54 @@ -/*global before,describe,it */ -var fs = require('fs'); -var assert = require('chai').assert; -var mkdir = require('mkdirp'); -var tar = require('tar'); -var yauzl = require('yauzl'); -var WriteStream = fs.createWriteStream; - -var archiver = require('../'); -var helpers = require('./helpers'); -var binaryBuffer = helpers.binaryBuffer; - -var testBuffer = binaryBuffer(1024 * 16); -var testDate = new Date('Jan 03 2013 14:26:38 GMT'); -var testDate2 = new Date('Feb 10 2013 10:24:42 GMT'); - -var win32 = process.platform === 'win32'; - -describe('plugins', function() { - before(function() { - mkdir.sync('tmp'); - +import { + chmodSync, + createReadStream, + symlinkSync, + unlinkSync, + writeFileSync, + WriteStream, +} from "fs"; +import { assert } from "chai"; +import { mkdirp } from "mkdirp"; 
+import tar from "tar"; +import yauzl from "yauzl"; +import { TarArchive, ZipArchive } from "../index.js"; +import { binaryBuffer } from "./helpers/index.js"; + +const testBuffer = binaryBuffer(1024 * 16); +const testDate = new Date("Jan 03 2013 14:26:38 GMT"); +const testDate2 = new Date("Feb 10 2013 10:24:42 GMT"); +const win32 = process.platform === "win32"; + +describe("plugins", function () { + before(function () { + mkdirp.sync("tmp"); if (!win32) { - fs.chmodSync('test/fixtures/executable.sh', 0777); - fs.chmodSync('test/fixtures/directory/subdir/', 0755); - fs.symlinkSync('../level0.txt', 'test/fixtures/directory/subdir/level0link.txt'); - fs.symlinkSync('subsub/', 'test/fixtures/directory/subdir/subsublink'); + chmodSync("test/fixtures/executable.sh", 511); // 0777 + chmodSync("test/fixtures/directory/subdir/", 493); // 0755 + symlinkSync( + "../level0.txt", + "test/fixtures/directory/subdir/level0link.txt", + ); + symlinkSync("subsub/", "test/fixtures/directory/subdir/subsublink"); } else { - fs.writeFileSync('test/fixtures/directory/subdir/level0link.txt', '../level0.txt'); - fs.writeFileSync('test/fixtures/directory/subdir/subsublink', 'subsub'); + writeFileSync( + "test/fixtures/directory/subdir/level0link.txt", + "../level0.txt", + ); + writeFileSync("test/fixtures/directory/subdir/subsublink", "subsub"); } }); - - after(function() { - fs.unlinkSync('test/fixtures/directory/subdir/level0link.txt'); - fs.unlinkSync('test/fixtures/directory/subdir/subsublink'); + after(function () { + unlinkSync("test/fixtures/directory/subdir/level0link.txt"); + unlinkSync("test/fixtures/directory/subdir/subsublink"); }); - - describe('tar', function() { + describe("tar", function () { var actual = []; var archive; var entries = {}; - - before(function(done) { - archive = archiver('tar'); + before(function (done) { + archive = new TarArchive(); var testStream = new tar.Parse(); - - testStream.on('entry', function(entry) { + testStream.on("entry", function (entry) { actual.push(entry.path); entries[entry.path] = { type: entry.type, @@ -59,169 +62,199 @@ describe('plugins', function() { mtime: entry.mtime, atime: entry.atime, ctime: entry.ctime, - linkpath: entry.linkpath + linkpath: entry.linkpath, }; entry.resume(); }); - - testStream.on('end', function() { + testStream.on("end", function () { done(); }); - archive.pipe(testStream); - archive - .append(testBuffer, { name: 'buffer.txt', date: testDate }) - .append(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate }) - .append(null, { name: 'folder/', date: testDate }) - .directory('test/fixtures/directory', 'directory') - .symlink('manual-link.txt', 'manual-link-target.txt') + .append(testBuffer, { name: "buffer.txt", date: testDate }) + .append(createReadStream("test/fixtures/test.txt"), { + name: "stream.txt", + date: testDate, + }) + .append(null, { name: "folder/", date: testDate }) + .directory("test/fixtures/directory", "directory") + .symlink("manual-link.txt", "manual-link-target.txt") .finalize(); }); - - it('should append multiple entries', function() { + it("should append multiple entries", function () { assert.isArray(actual); assert.isAbove(actual.length, 10); }); - - it('should append buffer', function() { - assert.property(entries, 'buffer.txt'); - assert.propertyVal(entries['buffer.txt'], 'path', 'buffer.txt'); - assert.propertyVal(entries['buffer.txt'], 'type', 'File'); - assert.propertyVal(entries['buffer.txt'], 'mode', 420); - assert.propertyVal(entries['buffer.txt'], 'size', 16384); + 
it("should append buffer", function () { + assert.property(entries, "buffer.txt"); + assert.propertyVal(entries["buffer.txt"], "path", "buffer.txt"); + assert.propertyVal(entries["buffer.txt"], "type", "File"); + assert.propertyVal(entries["buffer.txt"], "mode", 420); + assert.propertyVal(entries["buffer.txt"], "size", 16384); }); - - it('should append stream', function() { - assert.property(entries, 'stream.txt'); - assert.propertyVal(entries['stream.txt'], 'path', 'stream.txt'); - assert.propertyVal(entries['stream.txt'], 'type', 'File'); - assert.propertyVal(entries['stream.txt'], 'mode', 420); - assert.propertyVal(entries['stream.txt'], 'size', 19); + it("should append stream", function () { + assert.property(entries, "stream.txt"); + assert.propertyVal(entries["stream.txt"], "path", "stream.txt"); + assert.propertyVal(entries["stream.txt"], "type", "File"); + assert.propertyVal(entries["stream.txt"], "mode", 420); + assert.propertyVal(entries["stream.txt"], "size", 19); }); - - it('should append folder', function() { - assert.property(entries, 'folder/'); - assert.propertyVal(entries['folder/'], 'path', 'folder/'); - assert.propertyVal(entries['folder/'], 'type', 'Directory'); - assert.propertyVal(entries['folder/'], 'mode', 493); - assert.propertyVal(entries['folder/'], 'size', 0); + it("should append folder", function () { + assert.property(entries, "folder/"); + assert.propertyVal(entries["folder/"], "path", "folder/"); + assert.propertyVal(entries["folder/"], "type", "Directory"); + assert.propertyVal(entries["folder/"], "mode", 493); + assert.propertyVal(entries["folder/"], "size", 0); }); - - it('should append manual symlink', function() { - assert.property(entries, 'manual-link.txt'); - assert.propertyVal(entries['manual-link.txt'], 'type', 'SymbolicLink'); - assert.propertyVal(entries['manual-link.txt'], 'linkpath', 'manual-link-target.txt'); + it("should append manual symlink", function () { + assert.property(entries, "manual-link.txt"); + assert.propertyVal(entries["manual-link.txt"], "type", "SymbolicLink"); + assert.propertyVal( + entries["manual-link.txt"], + "linkpath", + "manual-link-target.txt", + ); }); - - it('should append via directory', function() { - assert.property(entries, 'directory/subdir/level1.txt'); - assert.property(entries, 'directory/subdir/level0link.txt'); + it("should append via directory", function () { + assert.property(entries, "directory/subdir/level1.txt"); + assert.property(entries, "directory/subdir/level0link.txt"); }); - - it('should retain symlinks via directory', function() { + it("should retain symlinks via directory", function () { if (win32) { this.skip(); } - - assert.property(entries, 'directory/subdir/level0link.txt'); - assert.propertyVal(entries['directory/subdir/level0link.txt'], 'type', 'SymbolicLink'); - assert.propertyVal(entries['directory/subdir/level0link.txt'], 'linkpath', '../level0.txt'); - - assert.property(entries, 'directory/subdir/subsublink'); - assert.propertyVal(entries['directory/subdir/subsublink'], 'type', 'SymbolicLink'); - assert.propertyVal(entries['directory/subdir/subsublink'], 'linkpath', 'subsub'); + assert.property(entries, "directory/subdir/level0link.txt"); + assert.propertyVal( + entries["directory/subdir/level0link.txt"], + "type", + "SymbolicLink", + ); + assert.propertyVal( + entries["directory/subdir/level0link.txt"], + "linkpath", + "../level0.txt", + ); + assert.property(entries, "directory/subdir/subsublink"); + assert.propertyVal( + entries["directory/subdir/subsublink"], + "type", + 
"SymbolicLink", + ); + assert.propertyVal( + entries["directory/subdir/subsublink"], + "linkpath", + "subsub", + ); }); }); - - describe('zip', function() { + describe("zip", function () { var actual = []; var archive; var entries = {}; - var zipComment = ''; - - before(function(done) { - archive = archiver('zip', { comment: 'archive comment' }); - var testStream = new WriteStream('tmp/plugin.zip'); - - testStream.on('close', function(entry) { - yauzl.open('tmp/plugin.zip', function(err, zip) { - zip.on('entry', function(entry) { + var zipComment = ""; + before(function (done) { + archive = new ZipArchive({ comment: "archive comment" }); + var testStream = new WriteStream("tmp/plugin.zip"); + testStream.on("close", function (entry) { + yauzl.open("tmp/plugin.zip", function (err, zip) { + zip.on("entry", function (entry) { actual.push(entry.fileName); entries[entry.fileName] = entry; }); - - zip.on('close', function() { + zip.on("close", function () { done(); }); - zipComment = zip.comment; }); }); - archive.pipe(testStream); - archive - .append(testBuffer, { name: 'buffer.txt', date: testDate, comment: 'entry comment' }) - .append(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate }) - .file('test/fixtures/executable.sh', { name: 'executable.sh', mode: win32 ? 0777 : null }) - .directory('test/fixtures/directory', 'directory') - .symlink('manual-link.txt', 'manual-link-target.txt') + .append(testBuffer, { + name: "buffer.txt", + date: testDate, + comment: "entry comment", + }) + .append(createReadStream("test/fixtures/test.txt"), { + name: "stream.txt", + date: testDate, + }) + .file("test/fixtures/executable.sh", { + name: "executable.sh", + mode: win32 ? 511 : null, // 0777 + }) + .directory("test/fixtures/directory", "directory") + .symlink("manual-link.txt", "manual-link-target.txt") .finalize(); }); - - it('should append multiple entries', function() { + it("should append multiple entries", function () { assert.isArray(actual); assert.isAbove(actual.length, 10); }); - - it('should append buffer', function() { - assert.property(entries, 'buffer.txt'); - assert.propertyVal(entries['buffer.txt'], 'uncompressedSize', 16384); - assert.propertyVal(entries['buffer.txt'], 'crc32', 3893830384); + it("should append buffer", function () { + assert.property(entries, "buffer.txt"); + assert.propertyVal(entries["buffer.txt"], "uncompressedSize", 16384); + assert.propertyVal(entries["buffer.txt"], "crc32", 3893830384); }); - - it('should append stream', function() { - assert.property(entries, 'stream.txt'); - assert.propertyVal(entries['stream.txt'], 'uncompressedSize', 19); - assert.propertyVal(entries['stream.txt'], 'crc32', 585446183); + it("should append stream", function () { + assert.property(entries, "stream.txt"); + assert.propertyVal(entries["stream.txt"], "uncompressedSize", 19); + assert.propertyVal(entries["stream.txt"], "crc32", 585446183); }); - - it('should append via file', function() { - assert.property(entries, 'executable.sh'); - assert.propertyVal(entries['executable.sh'], 'uncompressedSize', 11); - assert.propertyVal(entries['executable.sh'], 'crc32', 3957348457); + it("should append via file", function () { + assert.property(entries, "executable.sh"); + assert.propertyVal(entries["executable.sh"], "uncompressedSize", 11); + assert.propertyVal(entries["executable.sh"], "crc32", 3957348457); }); - - it('should append via directory', function() { - assert.property(entries, 'directory/subdir/level1.txt'); - 
assert.propertyVal(entries['directory/subdir/level1.txt'], 'uncompressedSize', 6); - assert.propertyVal(entries['directory/subdir/level1.txt'], 'crc32', 133711013); + it("should append via directory", function () { + assert.property(entries, "directory/subdir/level1.txt"); + assert.propertyVal( + entries["directory/subdir/level1.txt"], + "uncompressedSize", + 6, + ); + assert.propertyVal( + entries["directory/subdir/level1.txt"], + "crc32", + 133711013, + ); }); - - it('should append manual symlink', function() { - assert.property(entries, 'manual-link.txt'); - assert.propertyVal(entries['manual-link.txt'], 'crc32', 1121667014); - assert.propertyVal(entries['manual-link.txt'], 'externalFileAttributes', 2684354592); + it("should append manual symlink", function () { + assert.property(entries, "manual-link.txt"); + assert.propertyVal(entries["manual-link.txt"], "crc32", 1121667014); + assert.propertyVal( + entries["manual-link.txt"], + "externalFileAttributes", + 2684354592, + ); }); - - it('should allow for custom unix mode', function() { - assert.property(entries, 'executable.sh'); - assert.propertyVal(entries['executable.sh'], 'externalFileAttributes', 2180972576); - assert.equal((entries['executable.sh'].externalFileAttributes >>> 16) & 0xFFF, 511); - - assert.property(entries, 'directory/subdir/'); - assert.propertyVal(entries['directory/subdir/'], 'externalFileAttributes', 1106051088); - assert.equal((entries['directory/subdir/'].externalFileAttributes >>> 16) & 0xFFF, 493); + it("should allow for custom unix mode", function () { + assert.property(entries, "executable.sh"); + assert.propertyVal( + entries["executable.sh"], + "externalFileAttributes", + 2180972576, + ); + assert.equal( + (entries["executable.sh"].externalFileAttributes >>> 16) & 0xfff, + 511, + ); + assert.property(entries, "directory/subdir/"); + assert.propertyVal( + entries["directory/subdir/"], + "externalFileAttributes", + 1106051088, + ); + assert.equal( + (entries["directory/subdir/"].externalFileAttributes >>> 16) & 0xfff, + 493, + ); }); - - it('should allow for entry comments', function() { - assert.property(entries, 'buffer.txt'); - assert.propertyVal(entries['buffer.txt'], 'fileComment', 'entry comment'); + it("should allow for entry comments", function () { + assert.property(entries, "buffer.txt"); + assert.propertyVal(entries["buffer.txt"], "fileComment", "entry comment"); }); - - it('should allow for archive comment', function() { - assert.equal('archive comment', zipComment); + it("should allow for archive comment", function () { + assert.equal("archive comment", zipComment); }); }); }); diff --git a/website/babel.config.js b/website/babel.config.js index e00595da..bfd75dbd 100644 --- a/website/babel.config.js +++ b/website/babel.config.js @@ -1,3 +1,3 @@ module.exports = { - presets: [require.resolve('@docusaurus/core/lib/babel/preset')], + presets: [require.resolve("@docusaurus/core/lib/babel/preset")], }; diff --git a/website/docs/archiver_api.md b/website/docs/archiver_api.md index 4651596a..17828be4 100644 --- a/website/docs/archiver_api.md +++ b/website/docs/archiver_api.md @@ -7,37 +7,37 @@ sidebar_label: "Archiver" ## Archiver Class ```js -new Archiver(format, options) +new Archiver(format, options); ``` ### constructor ##### Parameters -- `format` - *String* - The archive format to use. -- `options` - *Object* +- `format` - _String_ - The archive format to use. 
+- `options` - _Object_ #### Options -The `options` object may include the following properties as well as all [Stream.duplex options](https://nodejs.org/api/stream.html#stream_new_stream_duplex_options): +The `options` object may include the following properties as well as all [Stream.duplex options](https://nodejs.org/api/stream.html#stream_new_stream_duplex_options): ##### Core Options -- `statConcurrency` - *Number* (default 4) - Sets the number of workers used to process the internal fs stat queue. +- `statConcurrency` - _Number_ (default 4) - Sets the number of workers used to process the internal fs stat queue. ##### ZIP Options -- `comment` - *String* - Sets the zip archive comment. -- `forceLocalTime` - *Boolean* - Forces the archive to contain local file times instead of UTC. -- `forceZip64` - *Boolean* - Forces the archive to contain ZIP64 headers. -- `namePrependSlash` - *Boolean* - Prepends a forward slash to archive file paths. -- `store` - *Boolean* - Sets the compression method to STORE. -- `zlib` - *Object* - Passed to [zlib](https://nodejs.org/api/zlib.html#zlib_class_options) to control compression. +- `comment` - _String_ - Sets the zip archive comment. +- `forceLocalTime` - _Boolean_ - Forces the archive to contain local file times instead of UTC. +- `forceZip64` - _Boolean_ - Forces the archive to contain ZIP64 headers. +- `namePrependSlash` - _Boolean_ - Prepends a forward slash to archive file paths. +- `store` - _Boolean_ - Sets the compression method to STORE. +- `zlib` - _Object_ - Passed to [zlib](https://nodejs.org/api/zlib.html#zlib_class_options) to control compression. ##### TAR Options -- `gzip` - *Boolean* - Compress the tar archive using gzip. -- `gzipOptions` - *Object* - Passed to [zlib](https://nodejs.org/api/zlib.html#zlib_class_options) to control compression. +- `gzip` - _Boolean_ - Compress the tar archive using gzip. +- `gzipOptions` - _Object_ - Passed to [zlib](https://nodejs.org/api/zlib.html#zlib_class_options) to control compression. See [tar-stream](https://www.npmjs.com/package/tar-stream) documentation for additional properties. @@ -51,10 +51,10 @@ abort() → {this} Aborts the archiving process, taking a best-effort approach, by: -* removing any pending queue tasks -* allowing any active queue workers to finish -* detaching internal module pipes -* ending both sides of the Transform stream +- removing any pending queue tasks +- allowing any active queue workers to finish +- detaching internal module pipes +- ending both sides of the Transform stream It will NOT drain any remaining sources. @@ -76,8 +76,8 @@ When the instance has received, processed, and emitted the input, the entry even ##### Parameters -- `source` - *Buffer | Stream | String* - The input source. -- `data` - *Object* - [The entry data](#entry-data). +- `source` - _Buffer | Stream | String_ - The input source. +- `data` - _Object_ - [The entry data](#entry-data). --- @@ -91,9 +91,9 @@ Appends a directory and its files, recursively, given its dirpath. ##### Parameters -- `dirpath` - *String* - The source directory path. -- `destpath` - *String* - The destination path within the archive. -- `data` - *Object* - [The entry data](#entry-data). +- `dirpath` - _String_ - The source directory path. +- `destpath` - _String_ - The destination path within the archive. +- `data` - _Object_ - [The entry data](#entry-data). 
--- @@ -109,8 +109,8 @@ When the instance has received, processed, and emitted the file, the entry event ##### Parameters -- `filepath` - *String* - The source filepath. -- `data` - *Object* - [The entry data](#entry-data). +- `filepath` - _String_ - The source filepath. +- `data` - _Object_ - [The entry data](#entry-data). --- @@ -124,7 +124,6 @@ Finalizes the instance and prevents further appending to the archive structure ( The `end`, `close` or `finish` events on the destination stream may fire right after calling this method so you should set listeners beforehand to properly detect stream completion. - ##### Parameters None @@ -141,9 +140,9 @@ Appends multiple files that match a glob pattern. ##### Parameters -- `pattern` - *String* - The [glob pattern](https://github.com/isaacs/minimatch) to match. -- `options` - *Object* - Options passed to [node-readdir-glob](https://github.com/yqnn/node-readdir-glob#options), plus an optional `cwd` property that sets the directory to read (defaults to `'.'`). -- `data` - *Object* - [The entry data](#entry-data). +- `pattern` - _String_ - The [glob pattern](https://github.com/isaacs/minimatch) to match. +- `options` - _Object_ - Options passed to [node-readdir-glob](https://github.com/yqnn/node-readdir-glob#options), plus an optional `cwd` property that sets the directory to read (defaults to `'.'`). +- `data` - _Object_ - [The entry data](#entry-data). --- @@ -171,7 +170,7 @@ Sets the module format name used for archiving. ##### Parameters -- `format` - *String* - The name of the format. +- `format` - _String_ - The name of the format. --- @@ -185,7 +184,7 @@ Sets the module used for archiving. ##### Parameters -- `module` - *Function* - The function for archiver to interact with. +- `module` - _Function_ - The function for archiver to interact with. --- @@ -201,9 +200,9 @@ This does NOT interact with filesystem and is used for programmatically creating ##### Parameters -- `filepath` - *String* - The symlink path (within archive). -- `target` - *String* - The target path (within archive). -- `mode` - *Number* - The entry permissions. +- `filepath` - _String_ - The symlink path (within archive). +- `target` - _String_ - The target path (within archive). +- `mode` - _Number_ - The entry permissions. ## Events @@ -219,28 +218,28 @@ The `entry` event object contains the following properties: The `progress` event object contains the following properties: -- `entries` - *Object* - An object containing the following properties: - - `total` - *Number* - The number of entries that have been appended. - - `processed` - *Number* - The number of entries that have been processed. +- `entries` - _Object_ - An object containing the following properties: + - `total` - _Number_ - The number of entries that have been appended. + - `processed` - _Number_ - The number of entries that have been processed. - `fs` - Object - An object containing the following properties: - - `totalBytes` - *Number* - The number of bytes that have been appended. Calculated asynchronously and might not be accurate: it growth while entries are added. (based on fs.Stats) - - `processedBytes` - *Number* - The number of bytes that have been processed. (based on fs.Stats) + - `totalBytes` - _Number_ - The number of bytes that have been appended. Calculated asynchronously and might not be accurate: it growth while entries are added. (based on fs.Stats) + - `processedBytes` - _Number_ - The number of bytes that have been processed. 
(based on fs.Stats) #### Event: error The `error` event object contains the following properties: -- `message` - *String* - The message of the error. -- `code` - *String* - The error code assigned to this error. -- `data` - *Object* - Additional data provided for reporting or debugging (where available). +- `message` - _String_ - The message of the error. +- `code` - _String_ - The error code assigned to this error. +- `data` - _Object_ - Additional data provided for reporting or debugging (where available). #### Event: warning The `warning` event object contains the following properties: -- `message` - *String* - The message of the error. -- `code` - *String* - The error code assigned to this error. -- `data` - *Object* - Additional data provided for reporting or debugging (where available). +- `message` - _String_ - The message of the error. +- `code` - _String_ - The error code assigned to this error. +- `data` - _Object_ - Additional data provided for reporting or debugging (where available). ## Entry Data @@ -248,36 +247,36 @@ The entry data object may contain the following properties: #### Core Entry Properties -- `name` - *String* - Sets the entry name including internal path. -- `date` - *String | Date* - Sets the entry date. -- `mode` - *Number* - Sets the entry permissions. -- `prefix` - *String* - Sets a path prefix for the entry name. Useful when working with methods like [directory](#directory) or [glob](#glob). -- `stats` - *fs.Stats* - Sets the stat data for this entry allowing for reduction of fs.stat calls. +- `name` - _String_ - Sets the entry name including internal path. +- `date` - _String | Date_ - Sets the entry date. +- `mode` - _Number_ - Sets the entry permissions. +- `prefix` - _String_ - Sets a path prefix for the entry name. Useful when working with methods like [directory](#directory) or [glob](#glob). +- `stats` - _fs.Stats_ - Sets the stat data for this entry allowing for reduction of fs.stat calls. #### ZIP Entry Properties -- `namePrependSlash` - *Boolean* - Prepends a forward slash to archive file paths. -- `store` - *Boolean* - Sets the compression method to STORE. +- `namePrependSlash` - _Boolean_ - Prepends a forward slash to archive file paths. +- `store` - _Boolean_ - Sets the compression method to STORE. ## Format Registration ### registerFormat ```js -registerFormat(format, module) +registerFormat(format, module); ``` Registers a format for use with archiver. ##### Parameters -- `format` - *String* - The name of the format. -- `module` - *Function* - The function for archiver to interact with. +- `format` - _String_ - The name of the format. +- `module` - _Function_ - The function for archiver to interact with. #### module ```js -module(options) +module(options); ``` The `module` function should consist of the following: @@ -308,11 +307,11 @@ module.prototype.finalize() {} ### isFormatRegistered ```js -isRegisteredFormat(format) +isRegisteredFormat(format); ``` Check if the format is already registered. ##### Parameters -- `format` - *String* - The name of the format. +- `format` - _String_ - The name of the format. diff --git a/website/docs/quickstart.md b/website/docs/quickstart.md index 2c88f421..ff366e2b 100644 --- a/website/docs/quickstart.md +++ b/website/docs/quickstart.md @@ -13,32 +13,34 @@ Archiver is available on [npm](https://www.npmjs.com/package/archiver). 
```js // require modules -const fs = require('fs'); -const archiver = require('archiver'); +const fs = require("fs"); +const archiver = require("archiver"); // create a file to stream archive data to. -const output = fs.createWriteStream(__dirname + '/example.zip'); -const archive = archiver('zip', { - zlib: { level: 9 } // Sets the compression level. +const output = fs.createWriteStream(__dirname + "/example.zip"); +const archive = archiver("zip", { + zlib: { level: 9 }, // Sets the compression level. }); // listen for all archive data to be written // 'close' event is fired only when a file descriptor is involved -output.on('close', function() { - console.log(archive.pointer() + ' total bytes'); - console.log('archiver has been finalized and the output file descriptor has closed.'); +output.on("close", function () { + console.log(archive.pointer() + " total bytes"); + console.log( + "archiver has been finalized and the output file descriptor has closed.", + ); }); // This event is fired when the data source is drained no matter what was the data source. // It is not part of this library but rather from the NodeJS Stream API. // @see: https://nodejs.org/api/stream.html#stream_event_end -output.on('end', function() { - console.log('Data has been drained'); +output.on("end", function () { + console.log("Data has been drained"); }); // good practice to catch warnings (ie stat failures and other non-blocking errors) -archive.on('warning', function(err) { - if (err.code === 'ENOENT') { +archive.on("warning", function (err) { + if (err.code === "ENOENT") { // log warning } else { // throw error @@ -47,7 +49,7 @@ archive.on('warning', function(err) { }); // good practice to catch this error explicitly -archive.on('error', function(err) { +archive.on("error", function (err) { throw err; }); @@ -55,27 +57,27 @@ archive.on('error', function(err) { archive.pipe(output); // append a file from stream -const file1 = __dirname + '/file1.txt'; -archive.append(fs.createReadStream(file1), { name: 'file1.txt' }); +const file1 = __dirname + "/file1.txt"; +archive.append(fs.createReadStream(file1), { name: "file1.txt" }); // append a file from string -archive.append('string cheese!', { name: 'file2.txt' }); +archive.append("string cheese!", { name: "file2.txt" }); // append a file from buffer -const buffer3 = Buffer.from('buff it!'); -archive.append(buffer3, { name: 'file3.txt' }); +const buffer3 = Buffer.from("buff it!"); +archive.append(buffer3, { name: "file3.txt" }); // append a file -archive.file('file1.txt', { name: 'file4.txt' }); +archive.file("file1.txt", { name: "file4.txt" }); // append files from a sub-directory and naming it `new-subdir` within the archive -archive.directory('subdir/', 'new-subdir'); +archive.directory("subdir/", "new-subdir"); // append files from a sub-directory, putting its contents at the root of archive -archive.directory('subdir/', false); +archive.directory("subdir/", false); // append files from a glob pattern -archive.glob('file*.txt', {cwd:__dirname}); +archive.glob("file*.txt", { cwd: __dirname }); // finalize the archive (ie we are done appending files but streams have to finish yet) // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js index 186bf925..b6bdd004 100644 --- a/website/docusaurus.config.js +++ b/website/docusaurus.config.js @@ -1,25 +1,25 @@ module.exports = { - title: 'Archiver', - tagline: 'A streaming interface for archive 
generation.', - url: 'https://www.archiverjs.com', - baseUrl: '/', - onBrokenLinks: 'throw', - onBrokenMarkdownLinks: 'warn', - favicon: 'img/favicon.ico', - organizationName: 'archiverjs', - projectName: 'node-archiver', - themeConfig: { + title: "Archiver", + tagline: "A streaming interface for archive generation.", + url: "https://www.archiverjs.com", + baseUrl: "/", + onBrokenLinks: "throw", + onBrokenMarkdownLinks: "warn", + favicon: "img/favicon.ico", + organizationName: "archiverjs", + projectName: "node-archiver", + themeConfig: { navbar: { - title: 'Archiver', + title: "Archiver", logo: { - alt: 'Archiver Logo', - src: 'img/logo.svg', + alt: "Archiver Logo", + src: "img/logo.svg", }, items: [ { - to: 'docs/quickstart', - label: 'Docs', - position: 'left', + to: "docs/quickstart", + label: "Docs", + position: "left", }, { to: "docs/archiver", @@ -27,42 +27,42 @@ module.exports = { position: "left", }, { - href: 'https://github.com/archiverjs/node-archiver/', - label: 'GitHub', - position: 'right', + href: "https://github.com/archiverjs/node-archiver/", + label: "GitHub", + position: "right", }, ], }, footer: { - style: 'dark', + style: "dark", links: [ { - title: 'Docs', + title: "Docs", items: [ { - label: 'Get Started', - to: 'docs/quickstart', + label: "Get Started", + to: "docs/quickstart", }, { - label: 'Archive Formats', - to: 'docs/archive-formats', + label: "Archive Formats", + to: "docs/archive-formats", }, { - label: 'API Reference', - to: 'docs/archiver', + label: "API Reference", + to: "docs/archiver", }, ], }, { - title: 'More', + title: "More", items: [ { - label: 'ZipStream', - to: 'zipstream', + label: "ZipStream", + to: "zipstream", }, { - label: 'GitHub', - href: 'https://github.com/archiverjs/', + label: "GitHub", + href: "https://github.com/archiverjs/", }, ], }, @@ -72,21 +72,23 @@ module.exports = { }, presets: [ [ - '@docusaurus/preset-classic', + "@docusaurus/preset-classic", { docs: { - sidebarPath: require.resolve('./sidebars.js'), - editUrl: 'https://github.com/archiverjs/node-archiver/edit/master/website/', + sidebarPath: require.resolve("./sidebars.js"), + editUrl: + "https://github.com/archiverjs/node-archiver/edit/master/website/", }, blog: { showReadingTime: true, - editUrl: 'https://github.com/archiverjs/node-archiver/edit/master/website/blog/', + editUrl: + "https://github.com/archiverjs/node-archiver/edit/master/website/blog/", }, theme: { - customCss: require.resolve('./src/css/custom.css'), + customCss: require.resolve("./src/css/custom.css"), }, gtag: { - trackingID: 'UA-75847652-4', + trackingID: "UA-75847652-4", anonymizeIP: true, }, }, diff --git a/website/src/pages/index.js b/website/src/pages/index.js index 5bf5a5e9..1c9e37e2 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -1,14 +1,14 @@ -import React from 'react'; -import clsx from 'clsx'; -import Layout from '@theme/Layout'; -import Link from '@docusaurus/Link'; -import useDocusaurusContext from '@docusaurus/useDocusaurusContext'; -import useBaseUrl from '@docusaurus/useBaseUrl'; -import styles from './styles.module.css'; +import React from "react"; +import clsx from "clsx"; +import Layout from "@theme/Layout"; +import Link from "@docusaurus/Link"; +import useDocusaurusContext from "@docusaurus/useDocusaurusContext"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import styles from "./styles.module.css"; const features = [ { - title: 'Streaming', + title: "Streaming", description: ( <> Archiver was designed to use native node streams as its data transport. 
@@ -16,20 +16,20 @@ const features = [
       </>
     ),
   },
   {
-    title: 'Extendable',
+    title: "Extendable",
     description: (
       <>
-        Archiver can be extended to support different archive formats
-        while reusing the same composition API.
+        Archiver can be extended to support different archive formats while
+        reusing the same composition API.
       </>
     ),
   },
 ];
 
-function Feature({imageUrl, title, description}) {
+function Feature({ imageUrl, title, description }) {
   const imgUrl = useBaseUrl(imageUrl);
   return (
-    <div className={clsx('col col--6', styles.feature)}>
+    <div className={clsx("col col--6", styles.feature)}>
       {imgUrl && (
         <div className="text--center">
           <img className={styles.featureImage} src={imgUrl} alt={title} />
         </div>
       )}
       <h3>{title}</h3>
@@ -43,22 +43,21 @@ function Feature({imageUrl, title, description}) {
 
 export default function Home() {
   const context = useDocusaurusContext();
-  const {siteConfig = {}} = context;
+  const { siteConfig = {} } = context;
   return (
-    <Layout
-      title={siteConfig.title}>
-      <header className={clsx('hero hero--primary', styles.heroBanner)}>
+    <Layout title={siteConfig.title}>
+      <header className={clsx("hero hero--primary", styles.heroBanner)}>
         <div className="container">
           <h1 className="hero__title">{siteConfig.title}</h1>
           <p className="hero__subtitle">{siteConfig.tagline}</p>
           <div className={styles.buttons}>
-            <Link
-              className={clsx(
-                'button button--outline button--secondary button--lg',
-                styles.getStarted,
-              )}
-              to={useBaseUrl('docs/quickstart')}>
+            <Link
+              className={clsx(
+                "button button--outline button--secondary button--lg",
+                styles.getStarted,
+              )}
+              to={useBaseUrl("docs/quickstart")}
+            >
               Get Started
             </Link>
          </div>
diff --git a/website/src/pages/zipstream.md b/website/src/pages/zipstream.md index 2500b9b4..6920cb8c 100644 --- a/website/src/pages/zipstream.md +++ b/website/src/pages/zipstream.md @@ -19,7 +19,7 @@ ZipStream is available on [npm](https://www.npmjs.com/package/zip-stream). ## ZipStream Class ```js -new ZipStream(options) +new ZipStream(options); ``` ### constructor @@ -30,12 +30,12 @@ new ZipStream(options) The `options` object may contain the following properties: -- `comment` - *String* - Sets the zip archive comment. -- `forceLocalTime` - *Boolean* - Forces the archive to contain local file times instead of UTC. -- `forceZip64` - *Boolean* - Forces the archive to contain ZIP64 headers. -- `namePrependSlash` - *Boolean* - Prepends a forward slash to archive file paths. -- `store` - *Boolean* - Sets the compression method to STORE. -- `zlib` - *Object* - Passed to [zlib](https://nodejs.org/api/zlib.html#zlib_class_options) to control compression +- `comment` - _String_ - Sets the zip archive comment. +- `forceLocalTime` - _Boolean_ - Forces the archive to contain local file times instead of UTC. +- `forceZip64` - _Boolean_ - Forces the archive to contain ZIP64 headers. +- `namePrependSlash` - _Boolean_ - Prepends a forward slash to archive file paths. +- `store` - _Boolean_ - Sets the compression method to STORE. +- `zlib` - _Object_ - Passed to [zlib](https://nodejs.org/api/zlib.html#zlib_class_options) to control compression --- @@ -49,19 +49,19 @@ entry(source, data, callback) → {this} ##### Parameters -- `source` - *Buffer | Stream | String* - The input source. -- `data` - *Object* - The entry data. -- `callback` - *Function* +- `source` - _Buffer | Stream | String_ - The input source. +- `data` - _Object_ - The entry data. +- `callback` - _Function_ The `data` object may contain the following properties: -- `name` - *String* - The entry name including internal path. -- `comment` - *String* - The entry comment. -- `date` - *String | Date* - The entry date. -- `mode` - *Number* - The entry permissions. -- `namePrependSlash` - *Boolean* - Prepends a forward slash to archive file paths. -- `store` - *Boolean* - The compression method to STORE. -- `type` - *String* - The entry type. Defaults to `directory` if name ends with trailing slash. +- `name` - _String_ - The entry name including internal path. +- `comment` - _String_ - The entry comment. +- `date` - _String | Date_ - The entry date. +- `mode` - _Number_ - The entry permissions. +- `namePrependSlash` - _Boolean_ - Prepends a forward slash to archive file paths. +- `store` - _Boolean_ - The compression method to STORE. +- `type` - _String_ - The entry type. Defaults to `directory` if name ends with trailing slash. ---
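To tie the `entry()` reference above together, here is a minimal usage sketch. It assumes an ESM default import of the `zip-stream` package and the callback-style `entry()`/`finish()` flow from that package's readme; the output filename is illustrative.

```js
import fs from "fs";
import ZipStream from "zip-stream"; // assumes an ESM default import of zip-stream

const archive = new ZipStream({ zlib: { level: 9 } });

archive.on("error", function (err) {
  throw err;
});

// pipe archive data to a destination before adding entries
const output = fs.createWriteStream("example.zip"); // illustrative output path
archive.pipe(output);

// append a string entry, then an explicit directory entry, then finish
archive.entry("string contents", { name: "string.txt" }, function (err) {
  if (err) throw err;
  archive.entry(null, { name: "directory/" }, function (err) {
    if (err) throw err;
    archive.finish();
  });
});
```

Each `entry()` callback fires once that entry has been fully appended, which is why the next entry is queued inside it rather than added in parallel.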