From 34a20ab4f08a1ee79a41c820c80471034e1a03df Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 14:27:09 +0800 Subject: [PATCH 01/78] support TypeScript --- .env.example | 5 + .eslintrc.js | 147 +- .github/workflows/lint.yml | 5 +- .github/workflows/nodejs-windows.yml | 2 +- .gitignore | 2 + .husky/pre-commit | 1 + .mocharc.js | 7 + CONTRIBUTING.md | 14 +- MAINTAINERS.md | 18 +- babel-register.js | 17 + build.mjs | 121 + package-lock.json | 9054 ++++++-------------------- package.json | 80 +- src/AssumeRoleProvider.js | 218 + src/CredentialProvider.js | 50 + src/Credentials.js | 37 + src/base-error.ts | 30 + src/errors.ts | 107 + src/extensions.js | 175 + src/helpers.js | 822 +++ src/minio.js | 3987 ++++++++++++ src/notification.js | 200 + src/object-uploader.js | 287 + src/s3-endpoints.js | 50 + src/signing.js | 299 + src/transformers.js | 263 + src/xml-parsers.js | 709 ++ tests/functional/functional-tests.js | 4718 ++++++++++++++ tests/unit/test.js | 2107 ++++++ tsconfig.json | 20 + types/minio.d.ts | 775 +++ 31 files changed, 17324 insertions(+), 7003 deletions(-) create mode 100644 .env.example mode change 100755 => 100644 .husky/pre-commit create mode 100644 .mocharc.js create mode 100644 babel-register.js create mode 100644 build.mjs create mode 100644 src/AssumeRoleProvider.js create mode 100644 src/CredentialProvider.js create mode 100644 src/Credentials.js create mode 100644 src/base-error.ts create mode 100644 src/errors.ts create mode 100644 src/extensions.js create mode 100644 src/helpers.js create mode 100644 src/minio.js create mode 100644 src/notification.js create mode 100644 src/object-uploader.js create mode 100644 src/s3-endpoints.js create mode 100644 src/signing.js create mode 100644 src/transformers.js create mode 100644 src/xml-parsers.js create mode 100644 tests/functional/functional-tests.js create mode 100644 tests/unit/test.js create mode 100644 tsconfig.json create mode 100644 types/minio.d.ts diff --git a/.env.example 
b/.env.example new file mode 100644 index 00000000..2dcbf941 --- /dev/null +++ b/.env.example @@ -0,0 +1,5 @@ +# copy this file as .env for testing env + +SERVER_ENDPOINT="" +ACCESS_KEY="" +SECRET_KEY="" diff --git a/.eslintrc.js b/.eslintrc.js index 8f068162..767ceb9b 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -4,68 +4,117 @@ module.exports = { mocha: true, es6: true, }, - ignorePatterns: ['src/test/*.*', 'examples/**/*'], - overrides: [], extends: [ 'eslint:recommended', 'plugin:@typescript-eslint/recommended', 'prettier', // This should be the last entry. ], parser: '@typescript-eslint/parser', - plugins: ['@typescript-eslint', 'simple-import-sort'], + plugins: ['@typescript-eslint', 'simple-import-sort', 'unused-imports', 'import', 'unicorn'], parserOptions: { sourceType: 'module', - ecmaVersion: 8, + ecmaVersion: 2022, + }, + ignorePatterns: ['examples/**/*', 'dist/**/*'], + settings: { + 'import/parsers': { + '@typescript-eslint/parser': ['.ts'], + }, + // we need to config this so import are fully specified + // otherwise @babel/register can't handle TypeScript files + 'import/resolver': { + typescript: { + alwaysTryTypes: false, + extensionAlias: { + '.js': ['.js'], + }, + extensions: ['.ts', '.js', '.mjs'], + fullySpecified: true, + enforceExtension: true, + }, + }, }, rules: { - 'no-console': ['error'], - // "no-var": ["error"], - 'comma-dangle': 0, curly: ['error'], - 'prefer-const': 0, - 'no-template-curly-in-string': 'error', - // "quotes": ["error", "double"], - 'comma-spacing': 0, // ["error", { before: false, after: true }], - 'semi-spacing': 0, // ["warn", { before: false, after: true }], - 'space-before-blocks': 0, // ["warn", "always"], - 'switch-colon-spacing': ['warn', { after: true, before: false }], - 'keyword-spacing': 0, // ["warn", { before: true, after: true }], - 'template-curly-spacing': 0, // ["error", "never"], - 'rest-spread-spacing': 0, // ["error", "never"], - 'no-multi-spaces': 0, // ["warn", { ignoreEOLComments: false }], - 
+ // import node stdlib as `node:...` + // don't worry, babel will remove these prefix. + 'unicorn/prefer-node-protocol': 'error', 'simple-import-sort/imports': 'error', - 'simple-import-sort/exports': 'error', - indent: 'off', - 'linebreak-style': ['error', 'unix'], - semi: ['error', 'never'], - 'spaced-comment': [ - 'error', - 'always', - { - line: { - markers: ['/'], - exceptions: ['-', '+'], - }, - block: { - markers: ['!'], - exceptions: ['*'], - balanced: true, - }, - }, - ], - '@typescript-eslint/no-explicit-any': ['warn'], + }, + overrides: [ + { + files: './src/**/*', + excludedFiles: ['tests/*.*'], + rules: { + 'no-console': ['error'], + 'prefer-const': 0, + 'no-template-curly-in-string': 'error', + // "quotes": ["error", "double"], + 'comma-spacing': 0, // ["error", { before: false, after: true }], + 'semi-spacing': 0, // ["warn", { before: false, after: true }], + 'space-before-blocks': 0, // ["warn", "always"], + 'switch-colon-spacing': ['warn', { after: true, before: false }], + 'keyword-spacing': 0, // ["warn", { before: true, after: true }], + 'template-curly-spacing': 0, // ["error", "never"], + 'rest-spread-spacing': 0, // ["error", "never"], + 'no-multi-spaces': 0, // ["warn", { ignoreEOLComments: false }], + 'simple-import-sort/exports': 'error', + indent: 'off', + 'linebreak-style': ['error', 'unix'], + semi: ['error', 'never'], + 'spaced-comment': [ + 'error', + 'always', + { + line: { + markers: ['/'], + exceptions: ['-', '+'], + }, + block: { + markers: ['!'], + exceptions: ['*'], + balanced: true, + }, + }, + ], - '@typescript-eslint/prefer-optional-chain': 0, // ["warn"], - 'no-empty-function': 0, - '@typescript-eslint/no-empty-function': 0, // ["warn"], - '@typescript-eslint/no-var-requires': 0, - '@typescript-eslint/no-this-alias': 0, - '@typescript-eslint/no-empty-interface': ['warn'], + 'unused-imports/no-unused-imports': 'error', + '@typescript-eslint/consistent-type-imports': [ + 'error', + { + prefer: 'type-imports', + fixStyle: 
'separate-type-imports', + }, + ], - '@typescript-eslint/no-array-constructor': ['off'], + '@typescript-eslint/no-explicit-any': ['warn'], - 'no-extra-parens': 0, - '@typescript-eslint/no-extra-parens': 0, - }, + '@typescript-eslint/prefer-optional-chain': 0, // ["warn"], + '@typescript-eslint/no-empty-function': 0, // ["warn"], + '@typescript-eslint/no-var-requires': 0, + '@typescript-eslint/no-this-alias': 0, + '@typescript-eslint/no-empty-interface': ['warn'], + + '@typescript-eslint/no-array-constructor': ['off'], + + 'no-extra-parens': 0, + '@typescript-eslint/no-extra-parens': 0, + 'import/extensions': ['error', 'always'], + }, + }, + { + files: ['./src/**/*', './tests/**/*'], + rules: { + 'import/no-commonjs': 'error', + 'import/no-amd': 'error', + }, + }, + { + files: ['./tests/**/*'], + rules: { + 'no-empty-function': 0, + '@typescript-eslint/no-empty-function': 0, + }, + }, + ], } diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 39dcae1d..83ff866b 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -35,5 +35,6 @@ jobs: - run: npm ci - - run: npm run compile - - run: npm run browserify + - run: npm run type-check + + - run: npm run build diff --git a/.github/workflows/nodejs-windows.yml b/.github/workflows/nodejs-windows.yml index 660fc006..85ee8856 100644 --- a/.github/workflows/nodejs-windows.yml +++ b/.github/workflows/nodejs-windows.yml @@ -9,7 +9,7 @@ on: - master jobs: - build: + test: name: Test on node ${{ matrix.node_version }} and ${{ matrix.os }} runs-on: ${{ matrix.os }} strategy: diff --git a/.gitignore b/.gitignore index 1db8b977..5e2c97e7 100644 --- a/.gitignore +++ b/.gitignore @@ -1,12 +1,14 @@ # Logs logs *.log +.vscode/ .idea/ .DS_Store # Dependency directory # https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git node_modules +/.env /dist/ yarn.lock .yarn/ diff --git a/.husky/pre-commit b/.husky/pre-commit old mode 100755 new mode 100644 index 
57757f4e..268706fc --- a/.husky/pre-commit +++ b/.husky/pre-commit @@ -1,4 +1,5 @@ #!/bin/sh . "$(dirname "$0")/_/husky.sh" +npm run type-check npm run lint-staged diff --git a/.mocharc.js b/.mocharc.js new file mode 100644 index 00000000..f90a7f86 --- /dev/null +++ b/.mocharc.js @@ -0,0 +1,7 @@ +module.exports = { + spec: 'tests/**/*.js', + exit: true, + reporter: 'spec', + ui: 'bdd', + require: ['dotenv/config', 'source-map-support/register', './babel-register.js'], +} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index fd87ba78..81d1c28f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,13 +1,12 @@ ### Setup your minio-js Github Repository Fork [minio-js upstream](https://github.com/minio/minio-js/fork) source repository to your own personal repository. -MinIO Javascript library uses gulp for its dependency management http://gulpjs.com/ - ```bash $ git clone https://github.com/$USER_ID/minio-js $ cd minio-js $ npm install -$ gulp +$ npm test +$ npm build ... ``` @@ -21,3 +20,12 @@ $ gulp - Commit your changes (git commit -am 'Add some feature') - Push to the branch (git push origin my-new-feature) - Create new Pull Request + +### Style Guide + +We are currently migrating from JavaScript to TypeScript, so **All Source should be written in [ESM](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Modules)** + +That means only use nodejs `require` in js config file like `.eslintrc.js` + +You should always fully specify your import path extension, +which means you should write `import * from "errors.ts"` for `errors.ts` file, do not write `import "errors.js"`. diff --git a/MAINTAINERS.md b/MAINTAINERS.md index 99a206f3..1d27fb42 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -1,5 +1,5 @@ # For maintainers only -MinIO JS SDK uses [npm4+](https://www.npmjs.org/) build system. +Development of MinIO JS SDK require nodejs14+ and [npm7+](https://www.npmjs.org/). 
## Responsibilities Go through [Maintainer Responsibility Guide](https://gist.github.com/abperiasamy/f4d9b31d3186bbd26522). @@ -11,12 +11,22 @@ $ git clone git@github.com:minio/minio-js $ cd minio-js ``` -### Build and verify -Run `install` gulp task to build and verify the SDK. -```sh +### Install deps +```shell $ npm install ``` +### Testing +```shell +$ npm test +``` + +### Build +Build project for release +```sh +$ npm run build +``` + ## Publishing new release Edit `package.json` version and all other files to the latest version as shown below. ```sh diff --git a/babel-register.js b/babel-register.js new file mode 100644 index 00000000..65cc3b6e --- /dev/null +++ b/babel-register.js @@ -0,0 +1,17 @@ +// fix babel register doesn't transform TypeScript +// +// https://github.com/babel/babel/issues/8962#issuecomment-443135379 + +// eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs +const register = require('@babel/register') + +register({ + extensions: ['.ts', '.js'], + plugins: [ + '@upleveled/remove-node-prefix', // lower version of node (<14) doesn't support require('node:fs') + ], + presets: [ + ['@babel/preset-typescript', { allExtensions: true }], + ['@babel/preset-env', { targets: { node: 'current' }, modules: 'cjs' }], + ], +}) diff --git a/build.mjs b/build.mjs new file mode 100644 index 00000000..bba89db1 --- /dev/null +++ b/build.mjs @@ -0,0 +1,121 @@ +import { execSync } from 'node:child_process' +import * as fs from 'node:fs' +import * as fsp from 'node:fs/promises' +import * as path from 'node:path' + +import * as babel from '@babel/core' +import * as fsWalk from '@nodelib/fs.walk' + +const pkg = JSON.parse(fs.readFileSync('package.json').toString()) + +/** + * @param {'esm'|'cjs'} module + */ +function options(module) { + return { + sourceMaps: 'inline', + plugins: [ + ['@upleveled/remove-node-prefix'], + [ + 'replace-import-extension', + { + extMapping: { + '.ts': extMap[module], + '.js': extMap[module], + }, + }, + ], 
+ [ + 'babel-plugin-transform-replace-expressions', + { + replace: { + 'process.env.MINIO_JS_PACKAGE_VERSION': JSON.stringify(pkg.version), + }, + }, + ], + ], + presets: [ + ['@babel/env', { targets: { node: '8' }, modules: module === 'esm' ? false : module }], + ['@babel/preset-typescript'], + ], + } +} + +const extMap = { cjs: '.js', esm: '.mjs' } + +async function buildFiles({ files, module, outDir }) { + console.log(`building for ${module}`) + execSync(`npx tsc --outDir ${outDir}`, { stdio: 'inherit' }) + + const opt = options(module) + for (const file of files) { + if (!file.dirent.isFile()) { + continue + } + + if (file.path.endsWith('.d.ts')) { + continue + } + + const outFilePath = path.join(outDir, path.relative('src/', file.path)) + const outDirPath = path.dirname(outFilePath) + + await fsp.mkdir(outDirPath, { recursive: true }) + + try { + const result = await babel.transformAsync(fs.readFileSync(file.path).toString(), { + filename: file.path, + ...opt, + }) + + const distCodePath = outFilePath.replace(/\.[tj]s$/g, extMap[module]) + + fs.writeFileSync(distCodePath, result.code) + } catch (e) { + console.error(`failed to transpile ${file.path}`) + throw e + } + } +} + +async function main() { + await fsp.rm('dist', { recursive: true, force: true }) + + const entries = fsWalk.walkSync('src/') + await buildFiles({ + files: entries, + module: 'cjs', + outDir: './dist/main/', + }) + + await buildFiles({ + files: entries, + module: 'esm', + outDir: './dist/esm/', + }) + + for (const file of fsWalk.walkSync('dist/esm/')) { + if (file.dirent.isDirectory()) { + continue + } + + if (!file.path.endsWith('.d.ts')) { + continue + } + + const fileContent = fs.readFileSync(file.path).toString() + + const mts = babel.transformSync(fileContent, { + filename: file.path, + sourceMaps: true, + plugins: [['@babel/plugin-syntax-typescript'], ['replace-import-extension', { extMapping: { '.ts': '.mjs' } }]], + }) + + await fsp.unlink(file.path) + + const outFilePath = 
file.path.slice(0, file.path.length - '.d.ts'.length) + '.d.mts' + await fsp.writeFile(outFilePath, mts.code) + } +} + +await main() diff --git a/package-lock.json b/package-lock.json index e98da5ea..9062b008 100644 --- a/package-lock.json +++ b/package-lock.json @@ -13,8 +13,6 @@ "block-stream2": "^2.0.0", "browser-or-node": "^1.3.0", "buffer-crc32": "^0.2.13", - "crypto-browserify": "^3.12.0", - "es6-error": "^4.1.1", "fast-xml-parser": "^4.1.3", "ipaddr.js": "^2.0.1", "json-stream": "^1.0.0", @@ -30,22 +28,30 @@ "devDependencies": { "@babel/core": "^7.12.10", "@babel/preset-env": "^7.12.10", + "@babel/preset-typescript": "^7.21.4", + "@babel/register": "^7.21.0", + "@nodelib/fs.walk": "^1.2.8", + "@types/async": "^3.2.18", + "@types/browser-or-node": "^1.3.0", "@types/lodash": "^4.14.192", + "@types/mime-types": "^2.1.1", "@types/node": "^18.15.11", + "@types/xml": "^1.0.8", + "@types/xml2js": "^0.4.11", "@typescript-eslint/eslint-plugin": "^5.57.1", "@typescript-eslint/parser": "^5.57.1", - "babelify": "^10.0.0", - "browserify": "^16.5.2", + "@upleveled/babel-plugin-remove-node-prefix": "^1.0.4", + "babel-plugin-replace-import-extension": "^1.1.3", + "babel-plugin-transform-replace-expressions": "^0.2.0", "chai": "^4.2.0", + "dotenv": "^16.0.3", "eslint": "^8.37.0", "eslint-config-prettier": "^8.8.0", + "eslint-import-resolver-typescript": "^3.5.5", + "eslint-plugin-import": "^2.27.5", "eslint-plugin-simple-import-sort": "^10.0.0", - "gulp": "^4.0.2", - "gulp-babel": "^8.0.0", - "gulp-eslint": "^6.0.0", - "gulp-if": "^3.0.0", - "gulp-mocha": "^8.0.0", - "gulp-sourcemaps": "^2.6.5", + "eslint-plugin-unicorn": "^46.0.0", + "eslint-plugin-unused-imports": "^2.0.0", "husky": "^8.0.3", "lint-staged": "^13.2.1", "mocha": "^9.2.0", @@ -55,7 +61,8 @@ "source-map-support": "^0.5.13", "split-file": "^2.2.2", "superagent": "^5.1.0", - "uuid": "^3.4.0" + "typescript": "^5.0.4", + "uuid": "^9.0.0" }, "engines": { "node": ">8 <=19" @@ -66,6 +73,7 @@ "resolved": 
"https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.1.tgz", "integrity": "sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg==", "dev": true, + "license": "Apache-2.0", "dependencies": { "@jridgewell/gen-mapping": "^0.3.0", "@jridgewell/trace-mapping": "^0.3.9" @@ -79,6 +87,7 @@ "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.21.4.tgz", "integrity": "sha512-LYvhNKfwWSPpocw8GI7gpK2nq3HSDuEPC/uSYaALSJu9xjsalaaYFOq0Pwt5KmVqwEbZlDu81aLXwBOmD/Fv9g==", "dev": true, + "license": "MIT", "dependencies": { "@babel/highlight": "^7.18.6" }, @@ -91,6 +100,7 @@ "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.21.4.tgz", "integrity": "sha512-/DYyDpeCfaVinT40FPGdkkb+lYSKvsVuMjDAG7jPOWWiM1ibOaB9CXJAlc4d1QpP/U2q2P9jbrSlClKSErd55g==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } @@ -125,11 +135,18 @@ "url": "https://opencollective.com/babel" } }, + "node_modules/@babel/core/node_modules/convert-source-map": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", + "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==", + "dev": true + }, "node_modules/@babel/generator": { "version": "7.21.4", "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.21.4.tgz", "integrity": "sha512-NieM3pVIYW2SwGzKoqfPrQsf4xGs9M9AIG3ThppsSRmO+m7eQhmI6amajKMUeIO37wFfsvnvcxQFx6x6iqxDnA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/types": "^7.21.4", "@jridgewell/gen-mapping": "^0.3.2", @@ -145,6 +162,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz", "integrity": "sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/types": "^7.18.6" }, @@ -157,6 +175,7 @@ 
"resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.18.9.tgz", "integrity": "sha512-yFQ0YCHoIqarl8BCRwBL8ulYUaZpz3bNsA7oFepAzee+8/+ImtADXNOmO5vJvsPff3qi+hvpkY/NYBTrBQgdNw==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-explode-assignable-expression": "^7.18.6", "@babel/types": "^7.18.9" @@ -170,6 +189,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.21.4.tgz", "integrity": "sha512-Fa0tTuOXZ1iL8IeDFUWCzjZcn+sJGd9RZdH9esYVjEejGmzf+FFYQpMi/kZUk2kPy/q1H3/GPw7np8qar/stfg==", "dev": true, + "license": "MIT", "dependencies": { "@babel/compat-data": "^7.21.4", "@babel/helper-validator-option": "^7.21.0", @@ -189,6 +209,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.21.4.tgz", "integrity": "sha512-46QrX2CQlaFRF4TkwfTt6nJD7IHq8539cCL7SDpqWSDeJKY1xylKKY5F/33mJhLZ3mFvKv2gGrVS6NkyF6qs+Q==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-annotate-as-pure": "^7.18.6", "@babel/helper-environment-visitor": "^7.18.9", @@ -211,6 +232,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.21.4.tgz", "integrity": "sha512-M00OuhU+0GyZ5iBBN9czjugzWrEq2vDpf/zCYHxxf93ul/Q5rv+a5h+/+0WnI1AebHNVtl5bFV0qsJoH23DbfA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-annotate-as-pure": "^7.18.6", "regexpu-core": "^5.3.1" @@ -227,6 +249,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.3.tgz", "integrity": "sha512-z5aQKU4IzbqCC1XH0nAqfsFLMVSo22SBKUc0BxGrLkolTdPTructy0ToNnlO2zA4j9Q/7pjMZf0DSY+DSTYzww==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-compilation-targets": "^7.17.7", "@babel/helper-plugin-utils": "^7.16.7", @@ -244,6 +267,7 
@@ "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz", "integrity": "sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } @@ -253,6 +277,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.18.6.tgz", "integrity": "sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg==", "dev": true, + "license": "MIT", "dependencies": { "@babel/types": "^7.18.6" }, @@ -265,6 +290,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.21.0.tgz", "integrity": "sha512-HfK1aMRanKHpxemaY2gqBmL04iAPOPRj7DxtNbiDOrJK+gdwkiNRVpCpUJYbUT+aZyemKN8brqTOxzCaG6ExRg==", "dev": true, + "license": "MIT", "dependencies": { "@babel/template": "^7.20.7", "@babel/types": "^7.21.0" @@ -278,6 +304,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz", "integrity": "sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==", "dev": true, + "license": "MIT", "dependencies": { "@babel/types": "^7.18.6" }, @@ -290,6 +317,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.21.0.tgz", "integrity": "sha512-Muu8cdZwNN6mRRNG6lAYErJ5X3bRevgYR2O8wN0yn7jJSnGDu6eG59RfT29JHxGUovyfrh6Pj0XzmR7drNVL3Q==", "dev": true, + "license": "MIT", "dependencies": { "@babel/types": "^7.21.0" }, @@ -302,6 +330,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.21.4.tgz", "integrity": "sha512-orajc5T2PsRYUN3ZryCEFeMDYwyw09c/pZeaQEZPH0MpKzSvn3e0uXsDBu3k03VI+9DBiRo+l22BfKTpKwa/Wg==", "dev": true, + "license": "MIT", "dependencies": { "@babel/types": "^7.21.4" }, @@ -314,6 +343,7 @@ 
"resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.21.2.tgz", "integrity": "sha512-79yj2AR4U/Oqq/WOV7Lx6hUjau1Zfo4cI+JLAVYeMV5XIlbOhmjEk5ulbTc9fMpmlojzZHkUUxAiK+UKn+hNQQ==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-environment-visitor": "^7.18.9", "@babel/helper-module-imports": "^7.18.6", @@ -333,6 +363,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.18.6.tgz", "integrity": "sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/types": "^7.18.6" }, @@ -345,6 +376,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.20.2.tgz", "integrity": "sha512-8RvlJG2mj4huQ4pZ+rU9lqKi9ZKiRmuvGuM2HlWmkmgOhbs6zEAw6IEiJ5cQqGbDzGZOhwuOQNtZMi/ENLjZoQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } @@ -354,6 +386,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.18.9.tgz", "integrity": "sha512-dI7q50YKd8BAv3VEfgg7PS7yD3Rtbi2J1XMXaalXO0W0164hYLnh8zpjRS0mte9MfVp/tltvr/cfdXPvJr1opA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-annotate-as-pure": "^7.18.6", "@babel/helper-environment-visitor": "^7.18.9", @@ -372,6 +405,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.20.7.tgz", "integrity": "sha512-vujDMtB6LVfNW13jhlCrp48QNslK6JXi7lQG736HVbHz/mbf4Dc7tIRh1Xf5C0rF7BP8iiSxGMCmY6Ci1ven3A==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-environment-visitor": "^7.18.9", "@babel/helper-member-expression-to-functions": "^7.20.7", @@ -389,6 +423,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.20.2.tgz", "integrity": 
"sha512-+0woI/WPq59IrqDYbVGfshjT5Dmk/nnbdpcF8SnMhhXObpTq2KNBdLFRFrkVdbDOyUmHBCxzm5FHV1rACIkIbA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/types": "^7.20.2" }, @@ -401,6 +436,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.20.0.tgz", "integrity": "sha512-5y1JYeNKfvnT8sZcK9DVRtpTbGiomYIHviSP3OQWmDPU3DeH4a1ZlT/N2lyQ5P8egjcRaT/Y9aNqUxK0WsnIIg==", "dev": true, + "license": "MIT", "dependencies": { "@babel/types": "^7.20.0" }, @@ -413,6 +449,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz", "integrity": "sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/types": "^7.18.6" }, @@ -425,6 +462,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz", "integrity": "sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } @@ -434,6 +472,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz", "integrity": "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } @@ -443,6 +482,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.21.0.tgz", "integrity": "sha512-rmL/B8/f0mKS2baE9ZpyTcTavvEuWhTTW8amjzXNvYG4AwBsqTLikfXsEofsJEfKHf+HQVQbFOHy6o+4cnC/fQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } @@ -452,6 +492,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.20.5.tgz", "integrity": 
"sha512-bYMxIWK5mh+TgXGVqAtnu5Yn1un+v8DDZtqyzKRLUzrh70Eal2O3aZ7aPYiMADO4uKlkzOiRiZ6GX5q3qxvW9Q==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-function-name": "^7.19.0", "@babel/template": "^7.18.10", @@ -467,6 +508,7 @@ "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.21.0.tgz", "integrity": "sha512-XXve0CBtOW0pd7MRzzmoyuSj0e3SEzj8pgyFxnTT1NJZL38BD1MK7yYrm8yefRPIDvNNe14xR4FdbHwpInD4rA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/template": "^7.20.7", "@babel/traverse": "^7.21.0", @@ -481,6 +523,7 @@ "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-validator-identifier": "^7.18.6", "chalk": "^2.0.0", @@ -495,6 +538,7 @@ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", "dev": true, + "license": "MIT", "dependencies": { "color-convert": "^1.9.0" }, @@ -507,6 +551,7 @@ "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", @@ -521,6 +566,7 @@ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", "dev": true, + "license": "MIT", "dependencies": { "color-name": "1.1.3" } @@ -529,13 +575,15 @@ "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - 
"dev": true + "dev": true, + "license": "MIT" }, "node_modules/@babel/highlight/node_modules/escape-string-regexp": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.8.0" } @@ -545,6 +593,7 @@ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } @@ -554,6 +603,7 @@ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", "dev": true, + "license": "MIT", "dependencies": { "has-flag": "^3.0.0" }, @@ -566,6 +616,7 @@ "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.21.4.tgz", "integrity": "sha512-alVJj7k7zIxqBZ7BTRhz0IqJFxW1VJbm6N8JbcYhQ186df9ZBPbZBmWSqAMXwHGsCJdYks7z/voa3ibiS5bCIw==", "dev": true, + "license": "MIT", "bin": { "parser": "bin/babel-parser.js" }, @@ -578,6 +629,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz", "integrity": "sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, @@ -593,6 +645,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.20.7.tgz", "integrity": "sha512-sbr9+wNE5aXMBBFBICk01tt7sBf2Oc9ikRFEcem/ZORup9IMUdNhW7/wVLEbbtlWOsEubJet46mHAL2C8+2jKQ==", "dev": 
true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.20.2", "@babel/helper-skip-transparent-expression-wrappers": "^7.20.0", @@ -610,6 +663,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.20.7.tgz", "integrity": "sha512-xMbiLsn/8RK7Wq7VeVytytS2L6qE69bXPB10YCmMdDZbKF4okCqY74pI/jJQ/8U0b/F6NrT2+14b8/P9/3AMGA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-environment-visitor": "^7.18.9", "@babel/helper-plugin-utils": "^7.20.2", @@ -628,6 +682,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz", "integrity": "sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-create-class-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" @@ -644,6 +699,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.21.0.tgz", "integrity": "sha512-XP5G9MWNUskFuP30IfFSEFB0Z6HzLIUcjYM4bYOPHXl7eiJ9HFv8tWj6TXTN5QODiEhDZAeI4hLok2iHFFV4hw==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-create-class-features-plugin": "^7.21.0", "@babel/helper-plugin-utils": "^7.20.2", @@ -661,6 +717,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.18.6.tgz", "integrity": "sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-dynamic-import": "^7.8.3" @@ -677,6 +734,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.9.tgz", "integrity": 
"sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.18.9", "@babel/plugin-syntax-export-namespace-from": "^7.8.3" @@ -693,6 +751,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.18.6.tgz", "integrity": "sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-json-strings": "^7.8.3" @@ -709,6 +768,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.20.7.tgz", "integrity": "sha512-y7C7cZgpMIjWlKE5T7eJwp+tnRYM89HmRvWM5EQuB5BoHEONjmQ8lSNmBUwOyy/GFRsohJED51YBF79hE1djug==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.20.2", "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4" @@ -725,6 +785,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz", "integrity": "sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3" @@ -741,6 +802,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz", "integrity": "sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-numeric-separator": "^7.10.4" @@ -757,6 +819,7 @@ "resolved": 
"https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.20.7.tgz", "integrity": "sha512-d2S98yCiLxDVmBmE8UjGcfPvNEUbA1U5q5WxaWFUGRzJSVAZqm5W6MbPct0jxnegUZ0niLeNX+IOzEs7wYg9Dg==", "dev": true, + "license": "MIT", "dependencies": { "@babel/compat-data": "^7.20.5", "@babel/helper-compilation-targets": "^7.20.7", @@ -776,6 +839,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.18.6.tgz", "integrity": "sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/plugin-syntax-optional-catch-binding": "^7.8.3" @@ -792,6 +856,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.21.0.tgz", "integrity": "sha512-p4zeefM72gpmEe2fkUr/OnOXpWEf8nAgk7ZYVqqfFiyIG7oFfVZcCrU64hWn5xp4tQ9LkV4bTIa5rD0KANpKNA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.20.2", "@babel/helper-skip-transparent-expression-wrappers": "^7.20.0", @@ -809,6 +874,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz", "integrity": "sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-create-class-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" @@ -825,6 +891,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0.tgz", "integrity": "sha512-ha4zfehbJjc5MmXBlHec1igel5TJXXLDDRbuJ4+XT2TJcyD9/V1919BA8gMvsdHcNMBy4WBUBiRb3nw/EQUtBw==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-annotate-as-pure": "^7.18.6", 
"@babel/helper-create-class-features-plugin": "^7.21.0", @@ -843,6 +910,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.18.6.tgz", "integrity": "sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-create-regexp-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" @@ -859,6 +927,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, @@ -871,6 +940,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.12.13" }, @@ -883,6 +953,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.14.5" }, @@ -898,6 +969,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz", "integrity": "sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, @@ -910,6 +982,7 @@ "resolved": 
"https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz", "integrity": "sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.8.3" }, @@ -922,6 +995,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.20.0.tgz", "integrity": "sha512-IUh1vakzNoWalR8ch/areW7qFopR2AEw03JlG7BbrDqmQ4X3q9uuipQwSGrUn7oGiemKjtSLDhNtQHzMHr1JdQ==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.19.0" }, @@ -937,6 +1011,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, @@ -944,11 +1019,27 @@ "@babel/core": "^7.0.0-0" } }, + "node_modules/@babel/plugin-syntax-jsx": { + "version": "7.21.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.21.4.tgz", + "integrity": "sha512-5hewiLct5OKyh6PLKEYaFclcqtIgCb6bmELouxjF6up5q3Sov7rOayW4RwhbaBL0dit8rA80GNfY+UuDp2mBbQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.20.2" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, "node_modules/@babel/plugin-syntax-logical-assignment-operators": { "version": "7.10.4", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.10.4" }, @@ -961,6 +1052,7 
@@ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, @@ -973,6 +1065,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.10.4" }, @@ -985,6 +1078,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, @@ -997,6 +1091,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, @@ -1009,6 +1104,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, @@ -1021,6 +1117,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", "integrity": 
"sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.14.5" }, @@ -1036,6 +1133,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.14.5" }, @@ -1046,11 +1144,27 @@ "@babel/core": "^7.0.0-0" } }, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.21.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.21.4.tgz", + "integrity": "sha512-xz0D39NvhQn4t4RNsHmDnnsaQizIlUkdtYvLs8La1BlfjQ6JEwxkJGeqJMW2tAXx+q6H+WFuUTXNdYVpEya0YA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.20.2" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, "node_modules/@babel/plugin-transform-arrow-functions": { "version": "7.20.7", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.20.7.tgz", "integrity": "sha512-3poA5E7dzDomxj9WXWwuD6A5F3kc7VXwIJO+E+J8qtDtS+pXPAhrgEyh+9GBwBgPq1Z+bB+/JD60lp5jsN7JPQ==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.20.2" }, @@ -1066,6 +1180,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.20.7.tgz", "integrity": "sha512-Uo5gwHPT9vgnSXQxqGtpdufUiWp96gk7yiP4Mp5bm1QMkEmLXBO7PAGYbKoJ6DhAwiNkcHFBol/x5zZZkL/t0Q==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-module-imports": "^7.18.6", "@babel/helper-plugin-utils": "^7.20.2", @@ -1083,6 +1198,7 @@ "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.18.6.tgz", "integrity": "sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, @@ -1098,6 +1214,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.21.0.tgz", "integrity": "sha512-Mdrbunoh9SxwFZapeHVrwFmri16+oYotcZysSzhNIVDwIAb1UV+kvnxULSYq9J3/q5MDG+4X6w8QVgD1zhBXNQ==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.20.2" }, @@ -1113,6 +1230,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.21.0.tgz", "integrity": "sha512-RZhbYTCEUAe6ntPehC4hlslPWosNHDox+vAs4On/mCLRLfoDVHf6hVEd7kuxr1RnHwJmxFfUM3cZiZRmPxJPXQ==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-annotate-as-pure": "^7.18.6", "@babel/helper-compilation-targets": "^7.20.7", @@ -1136,6 +1254,7 @@ "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } @@ -1145,6 +1264,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.20.7.tgz", "integrity": "sha512-Lz7MvBK6DTjElHAmfu6bfANzKcxpyNPeYBGEafyA6E5HtRpjpZwU+u7Qrgz/2OR0z+5TvKYbPdphfSaAcZBrYQ==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.20.2", "@babel/template": "^7.20.7" @@ -1161,6 +1281,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.21.3.tgz", "integrity": "sha512-bp6hwMFzuiE4HqYEyoGJ/V2LeIWn+hLVKc4pnj++E5XQptwhtcGmSayM029d/j2X1bPKGTlsyPwAubuU22KhMA==", 
"dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.20.2" }, @@ -1176,6 +1297,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.18.6.tgz", "integrity": "sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-create-regexp-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" @@ -1192,6 +1314,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.18.9.tgz", "integrity": "sha512-d2bmXCtZXYc59/0SanQKbiWINadaJXqtvIQIzd4+hNwkWBgyCd5F/2t1kXoUdvPMrxzPvhK6EMQRROxsue+mfw==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.18.9" }, @@ -1207,6 +1330,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.18.6.tgz", "integrity": "sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-builder-binary-assignment-operator-visitor": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" @@ -1223,6 +1347,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.21.0.tgz", "integrity": "sha512-LlUYlydgDkKpIY7mcBWvyPPmMcOphEyYA27Ef4xpbh1IiDNLr0kZsos2nf92vz3IccvJI25QUwp86Eo5s6HmBQ==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.20.2" }, @@ -1238,6 +1363,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.18.9.tgz", "integrity": "sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-compilation-targets": "^7.18.9", 
"@babel/helper-function-name": "^7.18.9", @@ -1255,6 +1381,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.18.9.tgz", "integrity": "sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.18.9" }, @@ -1270,6 +1397,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.18.6.tgz", "integrity": "sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, @@ -1285,6 +1413,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.20.11.tgz", "integrity": "sha512-NuzCt5IIYOW0O30UvqktzHYR2ud5bOWbY0yaxWZ6G+aFzOMJvrs5YHNikrbdaT15+KNO31nPOy5Fim3ku6Zb5g==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-module-transforms": "^7.20.11", "@babel/helper-plugin-utils": "^7.20.2" @@ -1318,6 +1447,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.20.11.tgz", "integrity": "sha512-vVu5g9BPQKSFEmvt2TA4Da5N+QVS66EX21d8uoOihC+OCpUoGvzVsXeqFdtAEfVa5BILAeFt+U7yVmLbQnAJmw==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-hoist-variables": "^7.18.6", "@babel/helper-module-transforms": "^7.20.11", @@ -1336,6 +1466,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.18.6.tgz", "integrity": "sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-module-transforms": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" @@ -1352,6 +1483,7 @@ "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.20.5.tgz", "integrity": "sha512-mOW4tTzi5iTLnw+78iEq3gr8Aoq4WNRGpmSlrogqaiCBoR1HFhpU4JkpQFOHfeYx3ReVIFWOQJS4aZBRvuZ6mA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-create-regexp-features-plugin": "^7.20.5", "@babel/helper-plugin-utils": "^7.20.2" @@ -1368,6 +1500,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.18.6.tgz", "integrity": "sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, @@ -1383,6 +1516,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.18.6.tgz", "integrity": "sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-replace-supers": "^7.18.6" @@ -1399,6 +1533,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.21.3.tgz", "integrity": "sha512-Wxc+TvppQG9xWFYatvCGPvZ6+SIUxQ2ZdiBP+PHYMIjnPXD+uThCshaz4NZOnODAtBjjcVQQ/3OKs9LW28purQ==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.20.2" }, @@ -1414,6 +1549,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.18.6.tgz", "integrity": "sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, @@ -1429,6 +1565,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.20.5.tgz", "integrity": 
"sha512-kW/oO7HPBtntbsahzQ0qSE3tFvkFwnbozz3NWFhLGqH75vLEg+sCGngLlhVkePlCs3Jv0dBBHDzCHxNiFAQKCQ==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.20.2", "regenerator-transform": "^0.15.1" @@ -1445,6 +1582,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.18.6.tgz", "integrity": "sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, @@ -1460,6 +1598,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.18.6.tgz", "integrity": "sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, @@ -1475,6 +1614,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.20.7.tgz", "integrity": "sha512-ewBbHQ+1U/VnH1fxltbJqDeWBU1oNLG8Dj11uIv3xVf7nrQu0bPGe5Rf716r7K5Qz+SqtAOVswoVunoiBtGhxw==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.20.2", "@babel/helper-skip-transparent-expression-wrappers": "^7.20.0" @@ -1491,6 +1631,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.18.6.tgz", "integrity": "sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, @@ -1506,6 +1647,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.18.9.tgz", "integrity": "sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA==", "dev": true, + "license": "MIT", 
"dependencies": { "@babel/helper-plugin-utils": "^7.18.9" }, @@ -1521,6 +1663,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.18.9.tgz", "integrity": "sha512-SRfwTtF11G2aemAZWivL7PD+C9z52v9EvMqH9BuYbabyPuKUvSWks3oCg6041pT925L4zVFqaVBeECwsmlguEw==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.18.9" }, @@ -1531,11 +1674,30 @@ "@babel/core": "^7.0.0-0" } }, + "node_modules/@babel/plugin-transform-typescript": { + "version": "7.21.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.21.3.tgz", + "integrity": "sha512-RQxPz6Iqt8T0uw/WsJNReuBpWpBqs/n7mNo18sKLoTbMp+UrEekhH+pKSVC7gWz+DNjo9gryfV8YzCiT45RgMw==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.18.6", + "@babel/helper-create-class-features-plugin": "^7.21.0", + "@babel/helper-plugin-utils": "^7.20.2", + "@babel/plugin-syntax-typescript": "^7.20.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, "node_modules/@babel/plugin-transform-unicode-escapes": { "version": "7.18.10", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.18.10.tgz", "integrity": "sha512-kKAdAI+YzPgGY/ftStBFXTI1LZFju38rYThnfMykS+IXy8BVx+res7s2fxf1l8I35DV2T97ezo6+SGrXz6B3iQ==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.18.9" }, @@ -1551,6 +1713,7 @@ "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.18.6.tgz", "integrity": "sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-create-regexp-features-plugin": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" @@ -1567,6 +1730,7 @@ "resolved": 
"https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.21.4.tgz", "integrity": "sha512-2W57zHs2yDLm6GD5ZpvNn71lZ0B/iypSdIeq25OurDKji6AdzV07qp4s3n1/x5BqtiGaTrPN3nerlSCaC5qNTw==", "dev": true, + "license": "MIT", "dependencies": { "@babel/compat-data": "^7.21.4", "@babel/helper-compilation-targets": "^7.21.4", @@ -1656,6 +1820,7 @@ "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.5.tgz", "integrity": "sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.0.0", "@babel/plugin-proposal-unicode-property-regex": "^7.4.4", @@ -1667,17 +1832,57 @@ "@babel/core": "^7.0.0-0" } }, + "node_modules/@babel/preset-typescript": { + "version": "7.21.4", + "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.21.4.tgz", + "integrity": "sha512-sMLNWY37TCdRH/bJ6ZeeOH1nPuanED7Ai9Y/vH31IPqalioJ6ZNFUWONsakhv4r4n+I6gm5lmoE0olkgib/j/A==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.20.2", + "@babel/helper-validator-option": "^7.21.0", + "@babel/plugin-syntax-jsx": "^7.21.4", + "@babel/plugin-transform-modules-commonjs": "^7.21.2", + "@babel/plugin-transform-typescript": "^7.21.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/register": { + "version": "7.21.0", + "resolved": "https://registry.npmjs.org/@babel/register/-/register-7.21.0.tgz", + "integrity": "sha512-9nKsPmYDi5DidAqJaQooxIhsLJiNMkGr8ypQ8Uic7cIox7UCDsM7HuUGxdGT7mSDTYbqzIdsOWzfBton/YJrMw==", + "dev": true, + "dependencies": { + "clone-deep": "^4.0.1", + "find-cache-dir": "^2.0.0", + "make-dir": "^2.1.0", + "pirates": "^4.0.5", + "source-map-support": "^0.5.16" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, "node_modules/@babel/regjsgen": { "version": "0.8.0", 
"resolved": "https://registry.npmjs.org/@babel/regjsgen/-/regjsgen-0.8.0.tgz", "integrity": "sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/@babel/runtime": { "version": "7.21.0", "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.21.0.tgz", "integrity": "sha512-xwII0//EObnq89Ji5AKYQaRYiW/nZ3llSv29d49IuxPhKbtJoLP+9QUUZ4nVragQVtaVGeZrpB+ZtG/Pdy/POw==", "dev": true, + "license": "MIT", "dependencies": { "regenerator-runtime": "^0.13.11" }, @@ -1690,6 +1895,7 @@ "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.20.7.tgz", "integrity": "sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw==", "dev": true, + "license": "MIT", "dependencies": { "@babel/code-frame": "^7.18.6", "@babel/parser": "^7.20.7", @@ -1704,6 +1910,7 @@ "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.21.4.tgz", "integrity": "sha512-eyKrRHKdyZxqDm+fV1iqL9UAHMoIg0nDaGqfIOd8rKH17m5snv7Gn4qgjBoFfLz9APvjFU/ICT00NVCv1Epp8Q==", "dev": true, + "license": "MIT", "dependencies": { "@babel/code-frame": "^7.21.4", "@babel/generator": "^7.21.4", @@ -1725,6 +1932,7 @@ "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } @@ -1734,6 +1942,7 @@ "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.21.4.tgz", "integrity": "sha512-rU2oY501qDxE8Pyo7i/Orqma4ziCOrby0/9mvbDUGEfvZjb279Nk9k19e2fiCxHbRRpY2ZyrgW1eq22mvmOIzA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-string-parser": "^7.19.4", "@babel/helper-validator-identifier": "^7.19.1", @@ -1748,6 +1957,7 @@ "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", "integrity": 
"sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", "dev": true, + "license": "MIT", "dependencies": { "eslint-visitor-keys": "^3.3.0" }, @@ -1763,6 +1973,7 @@ "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.5.0.tgz", "integrity": "sha512-vITaYzIcNmjn5tF5uxcZ/ft7/RXGrMUIS9HalWckEOF6ESiwXKoMzAQf2UW0aVd6rnOeExTJVd5hmWXucBKGXQ==", "dev": true, + "license": "MIT", "engines": { "node": "^12.0.0 || ^14.0.0 || >=16.0.0" } @@ -1772,6 +1983,7 @@ "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.0.2.tgz", "integrity": "sha512-3W4f5tDUra+pA+FzgugqL2pRimUTDJWKr7BINqOpkZrC0uYI0NIc0/JFgBROCU07HR6GieA5m3/rsPIhDmCXTQ==", "dev": true, + "license": "MIT", "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", @@ -1795,100 +2007,17 @@ "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.38.0.tgz", "integrity": "sha512-IoD2MfUnOV58ghIHCiil01PcohxjbYR/qCxsoC+xNgUwh1EY8jOOrYmu3d3a71+tJJ23uscEV4X2HJWMsPJu4g==", "dev": true, + "license": "MIT", "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, - "node_modules/@gulp-sourcemaps/identity-map": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@gulp-sourcemaps/identity-map/-/identity-map-1.0.2.tgz", - "integrity": "sha512-ciiioYMLdo16ShmfHBXJBOFm3xPC4AuwO4xeRpFeHz7WK9PYsWCmigagG2XyzZpubK4a3qNKoUBDhbzHfa50LQ==", - "dev": true, - "dependencies": { - "acorn": "^5.0.3", - "css": "^2.2.1", - "normalize-path": "^2.1.1", - "source-map": "^0.6.0", - "through2": "^2.0.3" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/@gulp-sourcemaps/identity-map/node_modules/acorn": { - "version": "5.7.4", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.4.tgz", - "integrity": "sha512-1D++VG7BhrtvQpNbBzovKNc1FLGGEE/oGe7b9xJm/RFHMBeUaUGpluV9RLjZa47YFdPcDAenEYuq9pQPcMdLJg==", - "dev": true, - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - 
"node_modules/@gulp-sourcemaps/identity-map/node_modules/normalize-path": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", - "integrity": "sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w==", - "dev": true, - "dependencies": { - "remove-trailing-separator": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/@gulp-sourcemaps/identity-map/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dev": true, - "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } - }, - "node_modules/@gulp-sourcemaps/map-sources": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@gulp-sourcemaps/map-sources/-/map-sources-1.0.0.tgz", - "integrity": "sha512-o/EatdaGt8+x2qpb0vFLC/2Gug/xYPRXb6a+ET1wGYKozKN3krDWC/zZFZAtrzxJHuDL12mwdfEFKcKMNvc55A==", - "dev": true, - "dependencies": { - "normalize-path": "^2.0.1", - "through2": "^2.0.3" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/@gulp-sourcemaps/map-sources/node_modules/normalize-path": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", - "integrity": "sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w==", - "dev": true, - "dependencies": { - "remove-trailing-separator": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/@gulp-sourcemaps/map-sources/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dev": true, - "dependencies": { - "readable-stream": "~2.3.6", - 
"xtend": "~4.0.1" - } - }, "node_modules/@humanwhocodes/config-array": { "version": "0.11.8", "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz", "integrity": "sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==", "dev": true, + "license": "Apache-2.0", "dependencies": { "@humanwhocodes/object-schema": "^1.2.1", "debug": "^4.1.1", @@ -1903,6 +2032,7 @@ "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=12.22" }, @@ -1915,13 +2045,15 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", - "dev": true + "dev": true, + "license": "BSD-3-Clause" }, "node_modules/@jridgewell/gen-mapping": { "version": "0.3.3", "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz", "integrity": "sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==", "dev": true, + "license": "MIT", "dependencies": { "@jridgewell/set-array": "^1.0.1", "@jridgewell/sourcemap-codec": "^1.4.10", @@ -1936,6 +2068,7 @@ "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.0.0" } @@ -1945,6 +2078,7 @@ "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==", "dev": true, + "license": "MIT", "engines": { "node": 
">=6.0.0" } @@ -1953,13 +2087,15 @@ "version": "1.4.15", "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/@jridgewell/trace-mapping": { "version": "0.3.18", "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz", "integrity": "sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA==", "dev": true, + "license": "MIT", "dependencies": { "@jridgewell/resolve-uri": "3.1.0", "@jridgewell/sourcemap-codec": "1.4.14" @@ -1969,13 +2105,15 @@ "version": "1.4.14", "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz", "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", "dev": true, + "license": "MIT", "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" @@ -1989,6 +2127,7 @@ "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", "dev": true, + "license": "MIT", "engines": { "node": ">= 8" } @@ -2006,35 +2145,114 @@ "node": ">= 8" } }, + "node_modules/@pkgr/utils": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/@pkgr/utils/-/utils-2.3.1.tgz", + "integrity": "sha512-wfzX8kc1PMyUILA+1Z/EqoE4UCXGy0iRGMhPwdfae1+f0OXlLqCk+By+aMzgJBzR9AzS4CDizioG6Ss1gvAFJw==", + "dev": true, + "dependencies": { + "cross-spawn": 
"^7.0.3", + "is-glob": "^4.0.3", + "open": "^8.4.0", + "picocolors": "^1.0.0", + "tiny-glob": "^0.2.9", + "tslib": "^2.4.0" + }, + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts" + } + }, + "node_modules/@pkgr/utils/node_modules/tslib": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", + "integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==", + "dev": true + }, + "node_modules/@types/async": { + "version": "3.2.18", + "resolved": "https://registry.npmjs.org/@types/async/-/async-3.2.18.tgz", + "integrity": "sha512-/IsuXp3B9R//uRLi40VlIYoMp7OzhkunPe2fDu7jGfQXI9y3CDCx6FC4juRLSqrpmLst3vgsiK536AAGJFl4Ww==", + "dev": true + }, + "node_modules/@types/browser-or-node": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@types/browser-or-node/-/browser-or-node-1.3.0.tgz", + "integrity": "sha512-MVetr65IR7RdJbUxVHsaPFaXAO8fi89zv1g8L/mHygh1Q7xnnK02XZLwfMh57FOpTO6gtnagoPMQ/UOFfctXRQ==", + "dev": true + }, "node_modules/@types/json-schema": { "version": "7.0.11", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/json5": { + "version": "0.0.29", + "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", + "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", "dev": true }, "node_modules/@types/lodash": { "version": "4.14.192", "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.192.tgz", "integrity": "sha512-km+Vyn3BYm5ytMO13k9KTp27O75rbQ0NFw+U//g+PX7VZyjCioXaRFisqSIJRECljcTv73G3i6BpglNGHgUQ5A==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/mime-types": { + "version": "2.1.1", + 
"resolved": "https://registry.npmjs.org/@types/mime-types/-/mime-types-2.1.1.tgz", + "integrity": "sha512-vXOTGVSLR2jMw440moWTC7H19iUyLtP3Z1YTj7cSsubOICinjMxFeb/V57v9QdyyPGbbWolUFSSmSiRSn94tFw==", "dev": true }, "node_modules/@types/node": { "version": "18.15.11", "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz", "integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/normalize-package-data": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz", + "integrity": "sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==", "dev": true }, "node_modules/@types/semver": { "version": "7.3.13", "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.13.tgz", "integrity": "sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==", - "dev": true + "dev": true, + "license": "MIT" + }, + "node_modules/@types/xml": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/xml/-/xml-1.0.8.tgz", + "integrity": "sha512-IptEZBtDwSPayCP8FmbordhAdjdxsif4zH29xTbBRacZeCHFHZp8OxyG1/CrS8AS0MziJUPTGWCTKbYtvHGYPg==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/xml2js": { + "version": "0.4.11", + "resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.11.tgz", + "integrity": "sha512-JdigeAKmCyoJUiQljjr7tQG3if9NkqGUgwEUqBvV0N7LM4HyQk7UXCnusRa1lnvXAEYJ8mw8GtZWioagNztOwA==", + "dev": true, + "dependencies": { + "@types/node": "*" + } }, "node_modules/@typescript-eslint/eslint-plugin": { "version": "5.57.1", "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.57.1.tgz", "integrity": "sha512-1MeobQkQ9tztuleT3v72XmY0XuKXVXusAhryoLuU5YZ+mXoYKZP9SQ7Flulh1NX4DTjpGTc2b/eMu4u7M7dhnQ==", 
"dev": true, + "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.4.0", "@typescript-eslint/scope-manager": "5.57.1", @@ -2069,6 +2287,7 @@ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "dev": true, + "license": "ISC", "dependencies": { "yallist": "^4.0.0" }, @@ -2081,6 +2300,7 @@ "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", "dev": true, + "license": "ISC", "dependencies": { "lru-cache": "^6.0.0" }, @@ -2095,13 +2315,15 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/@typescript-eslint/parser": { "version": "5.57.1", "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.57.1.tgz", "integrity": "sha512-hlA0BLeVSA/wBPKdPGxoVr9Pp6GutGoY380FEhbVi0Ph4WNe8kLvqIRx76RSQt1lynZKfrXKs0/XeEk4zZycuA==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "@typescript-eslint/scope-manager": "5.57.1", "@typescript-eslint/types": "5.57.1", @@ -2129,6 +2351,7 @@ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.57.1.tgz", "integrity": "sha512-N/RrBwEUKMIYxSKl0oDK5sFVHd6VI7p9K5MyUlVYAY6dyNb/wHUqndkTd3XhpGlXgnQsBkRZuu4f9kAHghvgPw==", "dev": true, + "license": "MIT", "dependencies": { "@typescript-eslint/types": "5.57.1", "@typescript-eslint/visitor-keys": "5.57.1" @@ -2146,6 +2369,7 @@ "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.57.1.tgz", "integrity": "sha512-/RIPQyx60Pt6ga86hKXesXkJ2WOS4UemFrmmq/7eOyiYjYv/MUSHPlkhU6k9T9W1ytnTJueqASW+wOmW4KrViw==", "dev": true, + 
"license": "MIT", "dependencies": { "@typescript-eslint/typescript-estree": "5.57.1", "@typescript-eslint/utils": "5.57.1", @@ -2173,6 +2397,7 @@ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.57.1.tgz", "integrity": "sha512-bSs4LOgyV3bJ08F5RDqO2KXqg3WAdwHCu06zOqcQ6vqbTJizyBhuh1o1ImC69X4bV2g1OJxbH71PJqiO7Y1RuA==", "dev": true, + "license": "MIT", "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, @@ -2186,6 +2411,7 @@ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.57.1.tgz", "integrity": "sha512-A2MZqD8gNT0qHKbk2wRspg7cHbCDCk2tcqt6ScCFLr5Ru8cn+TCfM786DjPhqwseiS+PrYwcXht5ztpEQ6TFTw==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "@typescript-eslint/types": "5.57.1", "@typescript-eslint/visitor-keys": "5.57.1", @@ -2213,6 +2439,7 @@ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "dev": true, + "license": "ISC", "dependencies": { "yallist": "^4.0.0" }, @@ -2225,6 +2452,7 @@ "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", "dev": true, + "license": "ISC", "dependencies": { "lru-cache": "^6.0.0" }, @@ -2239,13 +2467,15 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/@typescript-eslint/utils": { "version": "5.57.1", "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.57.1.tgz", "integrity": "sha512-kN6vzzf9NkEtawECqze6v99LtmDiUJCVpvieTFA1uL7/jDghiJGubGZ5csicYHU1Xoqb3oH/R5cN5df6W41Nfg==", "dev": true, + "license": "MIT", "dependencies": { 
"@eslint-community/eslint-utils": "^4.2.0", "@types/json-schema": "^7.0.9", @@ -2272,6 +2502,7 @@ "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^4.1.1" @@ -2285,6 +2516,7 @@ "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", "dev": true, + "license": "BSD-2-Clause", "engines": { "node": ">=4.0" } @@ -2294,6 +2526,7 @@ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "dev": true, + "license": "ISC", "dependencies": { "yallist": "^4.0.0" }, @@ -2306,6 +2539,7 @@ "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", "dev": true, + "license": "ISC", "dependencies": { "lru-cache": "^6.0.0" }, @@ -2320,13 +2554,15 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/@typescript-eslint/visitor-keys": { "version": "5.57.1", "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.57.1.tgz", "integrity": "sha512-RjQrAniDU0CEk5r7iphkm731zKlFiUjvcBS2yHAg8WWqFMCaCrD0rKEVOMUyMMcbGPZ0bPp56srkGWrgfZqLRA==", "dev": true, + "license": "MIT", "dependencies": { "@typescript-eslint/types": "5.57.1", "eslint-visitor-keys": "^3.3.0" @@ -2343,12 +2579,23 @@ "version": "1.1.2", "resolved": 
"https://registry.npmjs.org/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz", "integrity": "sha512-sL/cEvJWAnClXw0wHk85/2L0G6Sj8UB0Ctc1TEMbKSsmpRosqhwj9gWgFRZSrBr2f9tiXISwNhCPmlfqUqyb9Q==", - "dev": true + "dev": true, + "license": "ISC" + }, + "node_modules/@upleveled/babel-plugin-remove-node-prefix": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@upleveled/babel-plugin-remove-node-prefix/-/babel-plugin-remove-node-prefix-1.0.4.tgz", + "integrity": "sha512-EBiMQNjGgDWhe/BcDRbb1R4q4SqS9bMH+NDFZMVMk1XrEHUr4Q5kMKZYDtj79y5QSASYCMQ29dLk9SvCv6haVQ==", + "dev": true, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } }, "node_modules/@zxing/text-encoding": { "version": "0.9.0", "resolved": "https://registry.npmjs.org/@zxing/text-encoding/-/text-encoding-0.9.0.tgz", "integrity": "sha512-U/4aVJ2mxI0aDNI8Uq0wEhMgY+u4CNtEb0om3+y3+niDAsoTCOB33UF0sxpzqzdqXLqmvc+vZyAt4O8pPdfkwA==", + "license": "(Unlicense OR Apache-2.0)", "optional": true }, "node_modules/acorn": { @@ -2356,6 +2603,7 @@ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz", "integrity": "sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==", "dev": true, + "license": "MIT", "bin": { "acorn": "bin/acorn" }, @@ -2368,42 +2616,11 @@ "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", "dev": true, + "license": "MIT", "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, - "node_modules/acorn-node": { - "version": "1.8.2", - "resolved": "https://registry.npmjs.org/acorn-node/-/acorn-node-1.8.2.tgz", - "integrity": "sha512-8mt+fslDufLYntIoPAaIMUe/lrbrehIiwmR3t2k9LljIzoigEPF27eLk2hy8zSGzmR/ogr7zbRKINMo1u0yh5A==", - "dev": true, - "dependencies": { - "acorn": "^7.0.0", - "acorn-walk": "^7.0.0", - "xtend": "^4.0.2" - } - }, - "node_modules/acorn-node/node_modules/acorn": { - "version": 
"7.4.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", - "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", - "dev": true, - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/acorn-node/node_modules/acorn-walk": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.2.0.tgz", - "integrity": "sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==", - "dev": true, - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/aggregate-error": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", @@ -2422,6 +2639,7 @@ "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "dev": true, + "license": "MIT", "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -2433,18 +2651,6 @@ "url": "https://github.com/sponsors/epoberezkin" } }, - "node_modules/ansi-colors": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-1.1.0.tgz", - "integrity": "sha512-SFKX67auSNoVR38N3L+nvsPjOE0bybKTYbkf5tRvushrAPQ9V75huw0ZxBkKVeRU9kqH3d6HA4xTckbwZ4ixmA==", - "dev": true, - "dependencies": { - "ansi-wrap": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/ansi-escapes": { "version": "4.3.2", "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", @@ -2472,23 +2678,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/ansi-gray": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/ansi-gray/-/ansi-gray-0.1.1.tgz", - "integrity": "sha512-HrgGIZUl8h2EHuZaU9hTR/cU5nhKxpVE1V6kdGsQ8e4zirElJ5fvtfc8N7Q1oq1aatO275i8pUFUCpNWCAnVWw==", - "dev": true, - "dependencies": { - 
"ansi-wrap": "0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -2498,6 +2693,7 @@ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, + "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -2508,365 +2704,89 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/ansi-wrap": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/ansi-wrap/-/ansi-wrap-0.1.0.tgz", - "integrity": "sha512-ZyznvL8k/FZeQHr2T6LzcJ/+vBApDnMNZvfVFy3At0knswWd6rJ3/0Hhmpu8oqa6C92npmozs890sX9Dl6q+Qw==", + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", "dev": true, - "engines": { - "node": ">=0.10.0" - } + "license": "Python-2.0" }, - "node_modules/anymatch": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", - "integrity": "sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==", + "node_modules/array-buffer-byte-length": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz", + "integrity": "sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==", "dev": true, "dependencies": { - "micromatch": "^3.1.4", - "normalize-path": "^2.1.1" + "call-bind": "^1.0.2", + "is-array-buffer": "^3.0.1" + }, + "funding": { + 
"url": "https://github.com/sponsors/ljharb" } }, - "node_modules/anymatch/node_modules/braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "node_modules/array-includes": { + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.6.tgz", + "integrity": "sha512-sgTbLvL6cNnw24FnbaDyjmvddQ2ML8arZsgaJhoABMoplz/4QRhtrYS+alr1BUM1Bwp6dhx8vVCBSLG+StwOFw==", "dev": true, "dependencies": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4", + "get-intrinsic": "^1.1.3", + "is-string": "^1.0.7" }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/anymatch/node_modules/braces/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, + "license": "MIT", "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node_modules/anymatch/node_modules/fill-range": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", - "integrity": 
"sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "node_modules/array.prototype.flat": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.1.tgz", + "integrity": "sha512-roTU0KWIOmJ4DRLmwKd19Otg0/mT3qPNt0Qb3GWW8iObuZXxrjB/pzn0R3hqpRSWg4HCwqx+0vwOnWnvlOyeIA==", "dev": true, "dependencies": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4", + "es-shim-unscopables": "^1.0.0" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/anymatch/node_modules/fill-range/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/anymatch/node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/anymatch/node_modules/is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/anymatch/node_modules/is-number/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": 
"sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/anymatch/node_modules/kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/anymatch/node_modules/micromatch": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", - "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", - "dev": true, - "dependencies": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/anymatch/node_modules/normalize-path": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", - "integrity": "sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w==", - "dev": true, - "dependencies": { - "remove-trailing-separator": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/anymatch/node_modules/to-regex-range": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", - "dev": true, - "dependencies": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" - }, - 
"engines": { - "node": ">=0.10.0" - } - }, - "node_modules/append-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/append-buffer/-/append-buffer-1.0.2.tgz", - "integrity": "sha512-WLbYiXzD3y/ATLZFufV/rZvWdZOs+Z/+5v1rBZ463Jn398pa6kcde27cvozYnBoxXblGZTFfoPpsaEw0orU5BA==", - "dev": true, - "dependencies": { - "buffer-equal": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/archy": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz", - "integrity": "sha512-Xg+9RwCg/0p32teKdGMPTPnVXKD0w3DfHnFTficozsAgsvq2XenPJq/MYpzzQ/v8zrOyJn6Ds39VA4JIDwFfqw==", - "dev": true - }, - "node_modules/argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true - }, - "node_modules/arr-diff": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz", - "integrity": "sha512-YVIQ82gZPGBebQV/a8dar4AitzCQs0jjXwMPZllpXMaGjXPYVUawSxQrRsjhjupyVxEvbHgUmIhKVlND+j02kA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/arr-filter": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/arr-filter/-/arr-filter-1.1.2.tgz", - "integrity": "sha512-A2BETWCqhsecSvCkWAeVBFLH6sXEUGASuzkpjL3GR1SlL/PWL6M3J8EAAld2Uubmh39tvkJTqC9LeLHCUKmFXA==", - "dev": true, - "dependencies": { - "make-iterator": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/arr-flatten": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz", - "integrity": "sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/arr-map": { - "version": "2.0.2", - "resolved": 
"https://registry.npmjs.org/arr-map/-/arr-map-2.0.2.tgz", - "integrity": "sha512-tVqVTHt+Q5Xb09qRkbu+DidW1yYzz5izWS2Xm2yFm7qJnmUfz4HPzNxbHkdRJbz2lrqI7S+z17xNYdFcBBO8Hw==", - "dev": true, - "dependencies": { - "make-iterator": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/arr-union": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/arr-union/-/arr-union-3.1.0.tgz", - "integrity": "sha512-sKpyeERZ02v1FeCZT8lrfJq5u6goHCtpTAzPwJYe7c8SPFOboNjNg1vz2L4VTn9T4PQxEx13TbXLmYUcS6Ug7Q==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/array-each": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/array-each/-/array-each-1.0.1.tgz", - "integrity": "sha512-zHjL5SZa68hkKHBFBK6DJCTtr9sfTCPCaph/L7tMSLcTFgy+zX7E+6q5UArbtOtMBCtxdICpfTCspRse+ywyXA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/array-initial": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/array-initial/-/array-initial-1.1.0.tgz", - "integrity": "sha512-BC4Yl89vneCYfpLrs5JU2aAu9/a+xWbeKhvISg9PT7eWFB9UlRvI+rKEtk6mgxWr3dSkk9gQ8hCrdqt06NXPdw==", - "dev": true, - "dependencies": { - "array-slice": "^1.0.0", - "is-number": "^4.0.0" + "node": ">= 0.4" }, - "engines": { - "node": ">=0.10.0" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/array-last": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/array-last/-/array-last-1.3.0.tgz", - "integrity": "sha512-eOCut5rXlI6aCOS7Z7kCplKRKyiFQ6dHFBem4PwlwKeNFk2/XxTrhRh5T9PyaEWGy/NHTZWbY+nsZlNFJu9rYg==", + "node_modules/array.prototype.flatmap": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.1.tgz", + "integrity": "sha512-8UGn9O1FDVvMNB0UlLv4voxRMze7+FpHyF5mSMRjWHUMlpoDViniy05870VlxhfgTnLbpuwTzvD76MTtWxB/mQ==", "dev": true, "dependencies": { - "is-number": "^4.0.0" + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + 
"es-abstract": "^1.20.4", + "es-shim-unscopables": "^1.0.0" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/array-slice": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/array-slice/-/array-slice-1.1.0.tgz", - "integrity": "sha512-B1qMD3RBP7O8o0H2KbrXDyB0IccejMF15+87Lvlor12ONPRHP6gTjXMNkt/d3ZuOGbAe66hFmaCfECI24Ufp6w==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/array-sort": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/array-sort/-/array-sort-1.0.0.tgz", - "integrity": "sha512-ihLeJkonmdiAsD7vpgN3CRcx2J2S0TiYW+IS/5zHBI7mKUq3ySvBdzzBfD236ubDBQFiiyG3SWCPc+msQ9KoYg==", - "dev": true, - "dependencies": { - "default-compare": "^1.0.0", - "get-value": "^2.0.6", - "kind-of": "^5.0.2" + "node": ">= 0.4" }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/array-unique": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz", - "integrity": "sha512-SleRWjh9JUud2wH1hPs9rZBZ33H6T9HOiL0uwGnGx9FpE6wKGyfWugmbkEOIs6qWrZhg0LWeLziLrEwQJhs5mQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/asn1.js": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-5.4.1.tgz", - "integrity": "sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA==", - "dependencies": { - "bn.js": "^4.0.0", - "inherits": "^2.0.1", - "minimalistic-assert": "^1.0.0", - "safer-buffer": "^2.1.0" - } - }, - "node_modules/asn1.js/node_modules/bn.js": { - "version": "4.12.0", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", - "integrity": 
"sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" - }, - "node_modules/assert": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/assert/-/assert-1.5.0.tgz", - "integrity": "sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA==", - "dev": true, - "dependencies": { - "object-assign": "^4.1.1", - "util": "0.10.3" - } - }, - "node_modules/assert/node_modules/inherits": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", - "integrity": "sha512-8nWq2nLTAwd02jTqJExUYFSD/fKq6VH9Y/oG2accc/kdI0V98Bag8d5a4gi3XHz73rDWa2PvTtvcWYquKqSENA==", - "dev": true - }, - "node_modules/assert/node_modules/util": { - "version": "0.10.3", - "resolved": "https://registry.npmjs.org/util/-/util-0.10.3.tgz", - "integrity": "sha512-5KiHfsmkqacuKjkRkdV7SsfDJ2EGiPsK92s2MhNSY0craxjTdKTtqKsJaCWp4LW33ZZ0OPUv1WO/TFvNQRiQxQ==", - "dev": true, - "dependencies": { - "inherits": "2.0.1" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/assertion-error": { @@ -2874,19 +2794,11 @@ "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", "dev": true, + "license": "MIT", "engines": { "node": "*" } }, - "node_modules/assign-symbols": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz", - "integrity": "sha512-Q+JC7Whu8HhmTdBph/Tq59IoRtoy6KAm5zzPv00WdujX82lbAL8K7WVjne7vdCsAmbF4AYaDOPyO3k0kl8qIrw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/astral-regex": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", @@ -2899,46 +2811,8 @@ "node_modules/async": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/async/-/async-3.2.4.tgz", - "integrity": 
"sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==" - }, - "node_modules/async-done": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/async-done/-/async-done-1.3.2.tgz", - "integrity": "sha512-uYkTP8dw2og1tu1nmza1n1CMW0qb8gWWlwqMmLb7MhBVs4BXrFziT6HXUd+/RlRA/i4H9AkofYloUbs1fwMqlw==", - "dev": true, - "dependencies": { - "end-of-stream": "^1.1.0", - "once": "^1.3.2", - "process-nextick-args": "^2.0.0", - "stream-exhaust": "^1.0.1" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/async-each": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/async-each/-/async-each-1.0.6.tgz", - "integrity": "sha512-c646jH1avxr+aVpndVMeAfYw7wAa6idufrlN3LPA4PmKS0QEGp6PIC9nwz0WQkkvBGAMEki3pFdtxaF39J9vvg==", - "dev": true, - "funding": [ - { - "type": "individual", - "url": "https://paulmillr.com/funding/" - } - ] - }, - "node_modules/async-settle": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/async-settle/-/async-settle-1.0.0.tgz", - "integrity": "sha512-VPXfB4Vk49z1LHHodrEQ6Xf7W4gg1w0dAPROHngx7qgDjqmIQ+fXmwgGXTW/ITLai0YLSvWepJOP9EVpMnEAcw==", - "dev": true, - "dependencies": { - "async-done": "^1.2.2" - }, - "engines": { - "node": ">= 0.10" - } + "integrity": "sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==", + "license": "MIT" }, "node_modules/asynckit": { "version": "0.4.0", @@ -2946,22 +2820,11 @@ "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", "dev": true }, - "node_modules/atob": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz", - "integrity": "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==", - "dev": true, - "bin": { - "atob": "bin/atob.js" - }, - "engines": { - "node": ">= 4.5.0" - } - }, "node_modules/available-typed-arrays": { "version": "1.0.5", "resolved": 
"https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==", + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -2974,6 +2837,7 @@ "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.3.tgz", "integrity": "sha512-8hOdmFYFSZhqg2C/JgLUQ+t52o5nirNwaWM2B9LWteozwIvM14VSwdsCAUET10qT+kmySAlseadmfeeSWFCy+Q==", "dev": true, + "license": "MIT", "dependencies": { "@babel/compat-data": "^7.17.7", "@babel/helper-define-polyfill-provider": "^0.3.3", @@ -2988,6 +2852,7 @@ "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.6.0.tgz", "integrity": "sha512-+eHqR6OPcBhJOGgsIar7xoAB1GcSwVUA3XjAd7HJNzOXT4wv6/H7KIdA/Nc60cvUlDbKApmqNvD1B1bzOt4nyA==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-define-polyfill-provider": "^0.3.3", "core-js-compat": "^3.25.1" @@ -3001,6 +2866,7 @@ "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.4.1.tgz", "integrity": "sha512-NtQGmyQDXjQqQ+IzRkBVwEOz9lQ4zxAQZgoAYEtU9dJjnl1Oc98qnN7jcp+bE7O7aYzVpavXE3/VKXNzUbh7aw==", "dev": true, + "license": "MIT", "dependencies": { "@babel/helper-define-polyfill-provider": "^0.3.3" }, @@ -3008,99 +2874,37 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/babelify": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/babelify/-/babelify-10.0.0.tgz", - "integrity": "sha512-X40FaxyH7t3X+JFAKvb1H9wooWKLRCi8pg3m8poqtdZaIng+bjzp9RvKQCvRjF9isHiPkXspbbXT/zwXLtwgwg==", - "dev": true, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } + "node_modules/babel-plugin-replace-import-extension": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/babel-plugin-replace-import-extension/-/babel-plugin-replace-import-extension-1.1.3.tgz", + 
"integrity": "sha512-NmHOpGOLqSnZgefu/rmCviGIlp51WLGk8OY9CiQmp9qrpBy6jFVNvxIP4jR9WXZlNOPfBdGpuhPJe8upV4DTGw==", + "dev": true }, - "node_modules/bach": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/bach/-/bach-1.2.0.tgz", - "integrity": "sha512-bZOOfCb3gXBXbTFXq3OZtGR88LwGeJvzu6szttaIzymOTS4ZttBNOWSv7aLZja2EMycKtRYV0Oa8SNKH/zkxvg==", + "node_modules/babel-plugin-transform-replace-expressions": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/babel-plugin-transform-replace-expressions/-/babel-plugin-transform-replace-expressions-0.2.0.tgz", + "integrity": "sha512-Eh1rRd9hWEYgkgoA3D0kGp7xJ/wgVshgsqmq60iC4HVWD+Lux+fNHSHBa2v1Hsv+dHflShC71qKhiH40OiPtDA==", "dev": true, "dependencies": { - "arr-filter": "^1.1.1", - "arr-flatten": "^1.0.1", - "arr-map": "^2.0.0", - "array-each": "^1.0.0", - "array-initial": "^1.0.0", - "array-last": "^1.1.1", - "async-done": "^1.2.2", - "async-settle": "^1.0.0", - "now-and-later": "^2.0.0" + "@babel/parser": "^7.3.3" }, - "engines": { - "node": ">= 0.10" + "peerDependencies": { + "@babel/core": "^7.0.0-0" } }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "dev": true - }, - "node_modules/base": { - "version": "0.11.2", - "resolved": "https://registry.npmjs.org/base/-/base-0.11.2.tgz", - "integrity": "sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==", - "dev": true, - "dependencies": { - "cache-base": "^1.0.1", - "class-utils": "^0.3.5", - "component-emitter": "^1.2.1", - "define-property": "^1.0.0", - "isobject": "^3.0.1", - "mixin-deep": "^1.2.0", - "pascalcase": "^0.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/base/node_modules/define-property": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha512-cZTYKFWspt9jZsMscWo8sc/5lbPC9Q0N5nBLgb+Yd915iL3udB1uFgS3B8YCx66UVHq018DAVFoee7x+gxggeA==", - "dev": true, - "dependencies": { - "is-descriptor": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/base64-js": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] + "license": "MIT" }, "node_modules/binary-extensions": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -3109,39 +2913,24 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/block-stream2/-/block-stream2-2.1.0.tgz", "integrity": "sha512-suhjmLI57Ewpmq00qaygS8UgEq2ly2PCItenIyhMqVjo4t4pGzqMvfgJuX8iWTeSDdfSSqS6j38fL4ToNL7Pfg==", + "license": "MIT", "dependencies": { "readable-stream": "^3.4.0" } }, - "node_modules/block-stream2/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/bluebird": { "version": "3.7.2", "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", 
"integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", - "dev": true - }, - "node_modules/bn.js": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.1.tgz", - "integrity": "sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ==" + "dev": true, + "license": "MIT" }, "node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "dev": true, + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -3152,6 +2941,7 @@ "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", "dev": true, + "license": "MIT", "dependencies": { "fill-range": "^7.0.1" }, @@ -3159,225 +2949,18 @@ "node": ">=8" } }, - "node_modules/brorand": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz", - "integrity": "sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w==" - }, "node_modules/browser-or-node": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/browser-or-node/-/browser-or-node-1.3.0.tgz", - "integrity": "sha512-0F2z/VSnLbmEeBcUrSuDH5l0HxTXdQQzLjkmBR4cYfvg1zJrKSlmIZFqyFR8oX0NrwPhy3c3HQ6i3OxMbew4Tg==" - }, - "node_modules/browser-pack": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/browser-pack/-/browser-pack-6.1.0.tgz", - "integrity": "sha512-erYug8XoqzU3IfcU8fUgyHqyOXqIE4tUTTQ+7mqUjQlvnXkOO6OlT9c/ZoJVHYoAaqGxr09CN53G7XIsO4KtWA==", - "dev": true, - "dependencies": { - "combine-source-map": "~0.8.0", - "defined": "^1.0.0", - "JSONStream": "^1.0.3", - "safe-buffer": "^5.1.1", - "through2": "^2.0.0", - 
"umd": "^3.0.0" - }, - "bin": { - "browser-pack": "bin/cmd.js" - } - }, - "node_modules/browser-pack/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dev": true, - "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } - }, - "node_modules/browser-resolve": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/browser-resolve/-/browser-resolve-2.0.0.tgz", - "integrity": "sha512-7sWsQlYL2rGLy2IWm8WL8DCTJvYLc/qlOnsakDac87SOoCd16WLsaAMdCiAqsTNHIe+SXfaqyxyo6THoWqs8WQ==", - "dev": true, - "dependencies": { - "resolve": "^1.17.0" - } + "integrity": "sha512-0F2z/VSnLbmEeBcUrSuDH5l0HxTXdQQzLjkmBR4cYfvg1zJrKSlmIZFqyFR8oX0NrwPhy3c3HQ6i3OxMbew4Tg==", + "license": "MIT" }, "node_modules/browser-stdout": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", - "dev": true - }, - "node_modules/browserify": { - "version": "16.5.2", - "resolved": "https://registry.npmjs.org/browserify/-/browserify-16.5.2.tgz", - "integrity": "sha512-TkOR1cQGdmXU9zW4YukWzWVSJwrxmNdADFbqbE3HFgQWe5wqZmOawqZ7J/8MPCwk/W8yY7Y0h+7mOtcZxLP23g==", - "dev": true, - "dependencies": { - "assert": "^1.4.0", - "browser-pack": "^6.0.1", - "browser-resolve": "^2.0.0", - "browserify-zlib": "~0.2.0", - "buffer": "~5.2.1", - "cached-path-relative": "^1.0.0", - "concat-stream": "^1.6.0", - "console-browserify": "^1.1.0", - "constants-browserify": "~1.0.0", - "crypto-browserify": "^3.0.0", - "defined": "^1.0.0", - "deps-sort": "^2.0.0", - "domain-browser": "^1.2.0", - "duplexer2": "~0.1.2", - "events": "^2.0.0", - "glob": "^7.1.0", - "has": "^1.0.0", - "htmlescape": "^1.1.0", - "https-browserify": "^1.0.0", - "inherits": "~2.0.1", - 
"insert-module-globals": "^7.0.0", - "JSONStream": "^1.0.3", - "labeled-stream-splicer": "^2.0.0", - "mkdirp-classic": "^0.5.2", - "module-deps": "^6.2.3", - "os-browserify": "~0.3.0", - "parents": "^1.0.1", - "path-browserify": "~0.0.0", - "process": "~0.11.0", - "punycode": "^1.3.2", - "querystring-es3": "~0.2.0", - "read-only-stream": "^2.0.0", - "readable-stream": "^2.0.2", - "resolve": "^1.1.4", - "shasum": "^1.0.0", - "shell-quote": "^1.6.1", - "stream-browserify": "^2.0.0", - "stream-http": "^3.0.0", - "string_decoder": "^1.1.1", - "subarg": "^1.0.0", - "syntax-error": "^1.1.1", - "through2": "^2.0.0", - "timers-browserify": "^1.0.1", - "tty-browserify": "0.0.1", - "url": "~0.11.0", - "util": "~0.10.1", - "vm-browserify": "^1.0.0", - "xtend": "^4.0.0" - }, - "bin": { - "browserify": "bin/cmd.js" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/browserify-aes": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", - "integrity": "sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==", - "dependencies": { - "buffer-xor": "^1.0.3", - "cipher-base": "^1.0.0", - "create-hash": "^1.1.0", - "evp_bytestokey": "^1.0.3", - "inherits": "^2.0.1", - "safe-buffer": "^5.0.1" - } - }, - "node_modules/browserify-cipher": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/browserify-cipher/-/browserify-cipher-1.0.1.tgz", - "integrity": "sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w==", - "dependencies": { - "browserify-aes": "^1.0.4", - "browserify-des": "^1.0.0", - "evp_bytestokey": "^1.0.0" - } - }, - "node_modules/browserify-des": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/browserify-des/-/browserify-des-1.0.2.tgz", - "integrity": "sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A==", - "dependencies": { - "cipher-base": "^1.0.1", - 
"des.js": "^1.0.0", - "inherits": "^2.0.1", - "safe-buffer": "^5.1.2" - } - }, - "node_modules/browserify-rsa": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.1.0.tgz", - "integrity": "sha512-AdEER0Hkspgno2aR97SAf6vi0y0k8NuOpGnVH3O99rcA5Q6sh8QxcngtHuJ6uXwnfAXNM4Gn1Gb7/MV1+Ymbog==", - "dependencies": { - "bn.js": "^5.0.0", - "randombytes": "^2.0.1" - } - }, - "node_modules/browserify-sign": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.2.1.tgz", - "integrity": "sha512-/vrA5fguVAKKAVTNJjgSm1tRQDHUU6DbwO9IROu/0WAzC8PKhucDSh18J0RMvVeHAn5puMd+QHC2erPRNf8lmg==", - "dependencies": { - "bn.js": "^5.1.1", - "browserify-rsa": "^4.0.1", - "create-hash": "^1.2.0", - "create-hmac": "^1.1.7", - "elliptic": "^6.5.3", - "inherits": "^2.0.4", - "parse-asn1": "^5.1.5", - "readable-stream": "^3.6.0", - "safe-buffer": "^5.2.0" - } - }, - "node_modules/browserify-sign/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/browserify-zlib": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.2.0.tgz", - "integrity": "sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA==", - "dev": true, - "dependencies": { - "pako": "~1.0.5" - } - }, - "node_modules/browserify/node_modules/inherits": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==", - "dev": true - }, - 
"node_modules/browserify/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dev": true, - "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } - }, - "node_modules/browserify/node_modules/util": { - "version": "0.10.4", - "resolved": "https://registry.npmjs.org/util/-/util-0.10.4.tgz", - "integrity": "sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A==", "dev": true, - "dependencies": { - "inherits": "2.0.3" - } + "license": "ISC" }, "node_modules/browserslist": { "version": "4.21.5", @@ -3394,6 +2977,7 @@ "url": "https://tidelift.com/funding/github/npm/browserslist" } ], + "license": "MIT", "dependencies": { "caniuse-lite": "^1.0.30001449", "electron-to-chromium": "^1.4.284", @@ -3407,83 +2991,39 @@ "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" } }, - "node_modules/buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.2.1.tgz", - "integrity": "sha512-c+Ko0loDaFfuPWiL02ls9Xd3GO3cPVmUobQ6t3rXNUk304u6hGq+8N/kFi+QEIKhzK3uwolVhLzszmfLmMLnqg==", - "dev": true, - "dependencies": { - "base64-js": "^1.0.2", - "ieee754": "^1.1.4" - } - }, "node_modules/buffer-crc32": { "version": "0.2.13", "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", + "license": "MIT", "engines": { "node": "*" } }, - "node_modules/buffer-equal": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/buffer-equal/-/buffer-equal-1.0.1.tgz", - "integrity": "sha512-QoV3ptgEaQpvVwbXdSO39iqPQTCxSF7A5U99AxbHYqUdCizL/lH2Z0A2y6nbZucxMEOtNyZfG2s6gsVugGpKkg==", - "dev": true, - "engines": { - "node": ">=0.4" - }, - "funding": { - "url": 
"https://github.com/sponsors/ljharb" - } - }, "node_modules/buffer-from": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", - "dev": true - }, - "node_modules/buffer-xor": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", - "integrity": "sha512-571s0T7nZWK6vB67HI5dyUF7wXiNcfaPPPTl6zYCNApANjIvYJTg7hlud/+cJpdAhS7dVzqMLmfhfHR3rAcOjQ==" - }, - "node_modules/builtin-status-codes": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz", - "integrity": "sha512-HpGFw18DgFWlncDfjTa2rcQ4W88O1mC8e8yZ2AvQY5KDaktSTwo+KRf6nHK6FRI5FyRyb/5T6+TSxfP7QyGsmQ==", - "dev": true + "dev": true, + "license": "MIT" }, - "node_modules/cache-base": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz", - "integrity": "sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==", + "node_modules/builtin-modules": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.3.0.tgz", + "integrity": "sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==", "dev": true, - "dependencies": { - "collection-visit": "^1.0.0", - "component-emitter": "^1.2.1", - "get-value": "^2.0.6", - "has-value": "^1.0.0", - "isobject": "^3.0.1", - "set-value": "^2.0.0", - "to-object-path": "^0.3.0", - "union-value": "^1.0.0", - "unset-value": "^1.0.0" - }, "engines": { - "node": ">=0.10.0" + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/cached-path-relative": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/cached-path-relative/-/cached-path-relative-1.1.0.tgz", - "integrity": 
"sha512-WF0LihfemtesFcJgO7xfOoOcnWzY/QHR4qeDqV44jPU3HTI54+LnfXK3SA27AVVGCdZFgjjFFaqUA9Jx7dMJZA==", - "dev": true - }, "node_modules/call-bind": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "license": "MIT", "dependencies": { "function-bind": "^1.1.1", "get-intrinsic": "^1.0.2" @@ -3497,19 +3037,11 @@ "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" } }, - "node_modules/camelcase": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-3.0.0.tgz", - "integrity": "sha512-4nhGqUkc4BqbBBB4Q6zLuD7lzzrHYrjKGeYaEji/3tFR5VdJu9v+LilhGIVe8wxEJPPOeWo7eg8dwY13TZ1BNg==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/caniuse-lite": { "version": "1.0.30001476", "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001476.tgz", @@ -3528,13 +3060,15 @@ "type": "github", "url": "https://github.com/sponsors/ai" } - ] + ], + "license": "CC-BY-4.0" }, "node_modules/chai": { "version": "4.3.7", "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.7.tgz", "integrity": "sha512-HLnAzZ2iupm25PlN0xFreAlBA5zaBSv3og0DdeGA4Ar6h6rJ3A0rolRUKJhSF2V10GZKDgWF/VmAEsNWjCRB+A==", "dev": true, + "license": "MIT", "dependencies": { "assertion-error": "^1.1.0", "check-error": "^1.0.2", @@ -3553,6 +3087,7 @@ "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -3569,6 +3104,7 @@ "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, + "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -3576,17 +3112,12 @@ "node": ">=8" } }, - "node_modules/chardet": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", - "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", - "dev": true - }, "node_modules/check-error": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", "integrity": "sha512-BrgHpW9NURQgzoNyjfq0Wu6VFO6D7IZEmJNdtgNqpzGG8RuNFHt2jQxWlAs4HMe119chBnv+34syEZtc6IhLtA==", "dev": true, + "license": "MIT", "engines": { "node": "*" } @@ -3602,6 +3133,7 @@ "url": "https://paulmillr.com/funding/" } ], + "license": "MIT", "dependencies": { "anymatch": "~3.1.2", "braces": "~3.0.2", @@ -3623,6 +3155,7 @@ "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", "dev": true, + "license": "ISC", "dependencies": { "normalize-path": "^3.0.0", "picomatch": "^2.0.4" @@ -3631,102 +3164,40 @@ "node": ">= 8" } }, - "node_modules/cipher-base": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", - "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==", - "dependencies": { - "inherits": "^2.0.1", - "safe-buffer": "^5.0.1" - } - }, - "node_modules/class-utils": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/class-utils/-/class-utils-0.3.6.tgz", - "integrity": "sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==", - "dev": true, - "dependencies": { - "arr-union": "^3.1.0", - 
"define-property": "^0.2.5", - "isobject": "^3.0.0", - "static-extend": "^0.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/class-utils/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha512-Rr7ADjQZenceVOAKop6ALkkRAmH1A4Gx9hV/7ZujPUN2rkATqFO0JZLZInbAjpZYoJ1gUx8MRMQVkYemcbMSTA==", - "dev": true, - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/class-utils/node_modules/is-accessor-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", - "integrity": "sha512-e1BM1qnDbMRG3ll2U9dSK0UMHuWOs3pY3AtcFsmvwPtKL3MML/Q86i+GilLfvqEs4GW+ExB91tQ3Ig9noDIZ+A==", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/class-utils/node_modules/is-accessor-descriptor/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/class-utils/node_modules/is-data-descriptor": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", - "integrity": "sha512-+w9D5ulSoBNlmw9OHn3U2v51SyoCd0he+bB3xMl62oijhrspxowjU+AIcDY0N3iEJbUEkB15IlMASQsxYigvXg==", + "node_modules/ci-info": { + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.8.0.tgz", + "integrity": "sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw==", "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, + "funding": [ + { + "type": "github", + "url": 
"https://github.com/sponsors/sibiraj-s" + } + ], "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node_modules/class-utils/node_modules/is-data-descriptor/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "node_modules/clean-regexp": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/clean-regexp/-/clean-regexp-1.0.0.tgz", + "integrity": "sha512-GfisEZEJvzKrmGWkvfhgzcz/BllN1USeqD2V6tg14OAOgaCD2Z/PUEuxnAZ/nPvmaHRG7a8y77p1T/IRQ4D1Hw==", "dev": true, "dependencies": { - "is-buffer": "^1.1.5" + "escape-string-regexp": "^1.0.5" }, "engines": { - "node": ">=0.10.0" + "node": ">=4" } }, - "node_modules/class-utils/node_modules/is-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.6.tgz", - "integrity": "sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==", + "node_modules/clean-regexp/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", "dev": true, - "dependencies": { - "is-accessor-descriptor": "^0.1.6", - "is-data-descriptor": "^0.1.4", - "kind-of": "^5.0.0" - }, "engines": { - "node": ">=0.10.0" + "node": ">=0.8.0" } }, "node_modules/clean-stack": { @@ -3766,20 +3237,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/cli-width": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz", - "integrity": "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==", - "dev": true, - "engines": { - "node": ">= 10" - } - }, "node_modules/cliui": { 
"version": "7.0.4", "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", "dev": true, + "license": "ISC", "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.0", @@ -3791,6 +3254,7 @@ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -3800,6 +3264,7 @@ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, + "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -3809,75 +3274,18 @@ "node": ">=8" } }, - "node_modules/clone": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz", - "integrity": "sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w==", - "dev": true, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/clone-buffer": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/clone-buffer/-/clone-buffer-1.0.0.tgz", - "integrity": "sha512-KLLTJWrvwIP+OPfMn0x2PheDEP20RPUcGXj/ERegTgdmPEZylALQldygiqrPPu8P45uNuPs7ckmReLY6v/iA5g==", - "dev": true, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/clone-stats": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/clone-stats/-/clone-stats-1.0.0.tgz", - "integrity": "sha512-au6ydSpg6nsrigcZ4m8Bc9hxjeW+GJ8xh5G3BJCMt4WXe1H10UNaVOamqQTmrx1kjVuxAHIQSNU6hY4Nsn9/ag==", - "dev": true - }, - "node_modules/cloneable-readable": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/cloneable-readable/-/cloneable-readable-1.1.3.tgz", - 
"integrity": "sha512-2EF8zTQOxYq70Y4XKtorQupqF0m49MBz2/yf5Bj+MHjvpG3Hy7sImifnqD6UA+TKYxeSV+u6qqQPawN5UvnpKQ==", - "dev": true, - "dependencies": { - "inherits": "^2.0.1", - "process-nextick-args": "^2.0.0", - "readable-stream": "^2.3.5" - } - }, - "node_modules/code-point-at": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", - "integrity": "sha512-RpAVKQA5T63xEj6/giIbUEtZwJ4UFIc3ZtvEkiaUERylqe8xb5IvqcgOurZLahv93CLKfxcw5YI+DZcUBRyLXA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/collection-map": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/collection-map/-/collection-map-1.0.0.tgz", - "integrity": "sha512-5D2XXSpkOnleOI21TG7p3T0bGAsZ/XknZpKBmGYyluO8pw4zA3K8ZlrBIbC4FXg3m6z/RNFiUFfT2sQK01+UHA==", - "dev": true, - "dependencies": { - "arr-map": "^2.0.2", - "for-own": "^1.0.0", - "make-iterator": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/collection-visit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz", - "integrity": "sha512-lNkKvzEeMBBjUGHZ+q6z9pSJla0KWAQPvtzhEV9+iGyQYG+pBpl7xKDhxoNSOZH2hhv0v5k0y2yAM4o4SjoSkw==", + "node_modules/clone-deep": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz", + "integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==", "dev": true, "dependencies": { - "map-visit": "^1.0.0", - "object-visit": "^1.0.0" + "is-plain-object": "^2.0.4", + "kind-of": "^6.0.2", + "shallow-clone": "^3.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=6" } }, "node_modules/color-convert": { @@ -3885,6 +3293,7 @@ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, + "license": "MIT", 
"dependencies": { "color-name": "~1.1.4" }, @@ -3896,16 +3305,8 @@ "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "node_modules/color-support": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", - "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", "dev": true, - "bin": { - "color-support": "bin.js" - } + "license": "MIT" }, "node_modules/colorette": { "version": "2.0.19", @@ -3913,33 +3314,6 @@ "integrity": "sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==", "dev": true }, - "node_modules/combine-source-map": { - "version": "0.8.0", - "resolved": "https://registry.npmjs.org/combine-source-map/-/combine-source-map-0.8.0.tgz", - "integrity": "sha512-UlxQ9Vw0b/Bt/KYwCFqdEwsQ1eL8d1gibiFb7lxQJFdvTgc2hIZi6ugsg+kyhzhPV+QEpUiEIwInIAIrgoEkrg==", - "dev": true, - "dependencies": { - "convert-source-map": "~1.1.0", - "inline-source-map": "~0.6.0", - "lodash.memoize": "~3.0.3", - "source-map": "~0.5.3" - } - }, - "node_modules/combine-source-map/node_modules/convert-source-map": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.1.3.tgz", - "integrity": "sha512-Y8L5rp6jo+g9VEPgvqNfEopjTR4OTYct8lXlS8iVQdmnjDvbdbzYe9rjtFCB9egC86JoNCU61WRY+ScjkZpnIg==", - "dev": true - }, - "node_modules/combine-source-map/node_modules/source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/combined-stream": { "version": "1.0.8", "resolved": 
"https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -3961,81 +3335,39 @@ "node": ">=14" } }, + "node_modules/commondir": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", + "integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==", + "dev": true + }, "node_modules/component-emitter": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", - "dev": true - }, - "node_modules/concat-stream": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", - "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", "dev": true, - "engines": [ - "node >= 0.8" - ], - "dependencies": { - "buffer-from": "^1.0.0", - "inherits": "^2.0.3", - "readable-stream": "^2.2.2", - "typedarray": "^0.0.6" - } - }, - "node_modules/console-browserify": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/console-browserify/-/console-browserify-1.2.0.tgz", - "integrity": "sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA==", - "dev": true - }, - "node_modules/constants-browserify": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/constants-browserify/-/constants-browserify-1.0.0.tgz", - "integrity": "sha512-xFxOwqIzR/e1k1gLiWEophSCMqXcwVHIH7akf7b/vxcUeGunlj3hvZaaqxwHsTgn+IndtkQJgSztIDWeumWJDQ==", - "dev": true - }, - 
"node_modules/convert-source-map": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", - "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==", - "dev": true + "license": "MIT" }, "node_modules/cookiejar": { "version": "2.1.4", "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.4.tgz", "integrity": "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==", - "dev": true - }, - "node_modules/copy-descriptor": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz", - "integrity": "sha512-XgZ0pFcakEUlbwQEVNg3+QAis1FyTL3Qel9FYy8pSkQqoG3PNoT0bOCQtOXcOkur21r2Eq2kI+IE+gsmAEVlYw==", "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/copy-props": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/copy-props/-/copy-props-2.0.5.tgz", - "integrity": "sha512-XBlx8HSqrT0ObQwmSzM7WE5k8FxTV75h1DX1Z3n6NhQ/UYYAvInWYmG06vFt7hQZArE2fuO62aihiWIVQwh1sw==", - "dev": true, - "dependencies": { - "each-props": "^1.3.2", - "is-plain-object": "^5.0.0" - } + "license": "MIT" }, "node_modules/core-js-compat": { "version": "3.30.0", "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.30.0.tgz", "integrity": "sha512-P5A2h/9mRYZFIAP+5Ab8ns6083IyVpSclU74UNvbGVQ8VM7n3n3/g2yF3AkKQ9NXz2O+ioxLbEWKnDtgsFamhg==", "dev": true, + "license": "MIT", "dependencies": { "browserslist": "^4.21.5" }, @@ -4044,56 +3376,12 @@ "url": "https://opencollective.com/core-js" } }, - "node_modules/core-util-is": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", - "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", - "dev": true - }, - "node_modules/create-ecdh": { - "version": "4.0.4", - "resolved": 
"https://registry.npmjs.org/create-ecdh/-/create-ecdh-4.0.4.tgz", - "integrity": "sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A==", - "dependencies": { - "bn.js": "^4.1.0", - "elliptic": "^6.5.3" - } - }, - "node_modules/create-ecdh/node_modules/bn.js": { - "version": "4.12.0", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", - "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" - }, - "node_modules/create-hash": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", - "integrity": "sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==", - "dependencies": { - "cipher-base": "^1.0.1", - "inherits": "^2.0.1", - "md5.js": "^1.3.4", - "ripemd160": "^2.0.1", - "sha.js": "^2.4.0" - } - }, - "node_modules/create-hmac": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz", - "integrity": "sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==", - "dependencies": { - "cipher-base": "^1.0.3", - "create-hash": "^1.1.0", - "inherits": "^2.0.1", - "ripemd160": "^2.0.0", - "safe-buffer": "^5.0.1", - "sha.js": "^2.4.8" - } - }, "node_modules/cross-spawn": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", "dev": true, + "license": "MIT", "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", @@ -4103,69 +3391,12 @@ "node": ">= 8" } }, - "node_modules/crypto-browserify": { - "version": "3.12.0", - "resolved": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-3.12.0.tgz", - "integrity": "sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg==", - 
"dependencies": { - "browserify-cipher": "^1.0.0", - "browserify-sign": "^4.0.0", - "create-ecdh": "^4.0.0", - "create-hash": "^1.1.0", - "create-hmac": "^1.1.0", - "diffie-hellman": "^5.0.0", - "inherits": "^2.0.1", - "pbkdf2": "^3.0.3", - "public-encrypt": "^4.0.0", - "randombytes": "^2.0.0", - "randomfill": "^1.0.3" - }, - "engines": { - "node": "*" - } - }, - "node_modules/css": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/css/-/css-2.2.4.tgz", - "integrity": "sha512-oUnjmWpy0niI3x/mPL8dVEI1l7MnG3+HHyRPHf+YFSbK+svOhXpmSOcDURUh2aOCgl2grzrOPt1nHLuCVFULLw==", - "dev": true, - "dependencies": { - "inherits": "^2.0.3", - "source-map": "^0.6.1", - "source-map-resolve": "^0.5.2", - "urix": "^0.1.0" - } - }, - "node_modules/d": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/d/-/d-1.0.1.tgz", - "integrity": "sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA==", - "dev": true, - "dependencies": { - "es5-ext": "^0.10.50", - "type": "^1.0.1" - } - }, - "node_modules/dargs": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/dargs/-/dargs-7.0.0.tgz", - "integrity": "sha512-2iy1EkLdlBzQGvbweYRFxmFath8+K7+AKB0TlhHWkNuH+TmovaMH/Wp7V7R4u7f4SnX3OgLsU9t1NI9ioDnUpg==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/dash-ast": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/dash-ast/-/dash-ast-1.0.0.tgz", - "integrity": "sha512-Vy4dx7gquTeMcQR/hDkYLGUnwVil6vk4FOOct+djUnHOUWt+zJPJAaRIXaAFkPXtJjvlY7o3rfRu0/3hpnwoUA==", - "dev": true - }, "node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", "dev": true, + "license": "MIT", "dependencies": { "ms": "2.1.2" }, @@ -4178,31 +3409,12 @@ } } }, - "node_modules/debug-fabulous": { - "version": "1.1.0", - "resolved": 
"https://registry.npmjs.org/debug-fabulous/-/debug-fabulous-1.1.0.tgz", - "integrity": "sha512-GZqvGIgKNlUnHUPQhepnUZFIMoi3dgZKQBzKDeL2g7oJF9SNAji/AAu36dusFUas0O+pae74lNeoIPHqXWDkLg==", - "dev": true, - "dependencies": { - "debug": "3.X", - "memoizee": "0.4.X", - "object-assign": "4.X" - } - }, - "node_modules/debug-fabulous/node_modules/debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "dependencies": { - "ms": "^2.1.1" - } - }, "node_modules/decamelize": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, @@ -4214,6 +3426,7 @@ "version": "0.2.2", "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz", "integrity": "sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==", + "license": "MIT", "engines": { "node": ">=0.10" } @@ -4223,6 +3436,7 @@ "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.3.tgz", "integrity": "sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw==", "dev": true, + "license": "MIT", "dependencies": { "type-detect": "^4.0.0" }, @@ -4234,27 +3448,16 @@ "version": "0.1.4", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", - "dev": true - }, - "node_modules/default-compare": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/default-compare/-/default-compare-1.0.0.tgz", - "integrity": 
"sha512-QWfXlM0EkAbqOCbD/6HjdwT19j7WCkMyiRhWilc4H9/5h/RzTF9gv5LYh1+CmDV5d1rki6KAWLtQale0xt20eQ==", "dev": true, - "dependencies": { - "kind-of": "^5.0.2" - }, - "engines": { - "node": ">=0.10.0" - } + "license": "MIT" }, - "node_modules/default-resolution": { + "node_modules/define-lazy-prop": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/default-resolution/-/default-resolution-2.0.0.tgz", - "integrity": "sha512-2xaP6GiwVwOEbXCGoJ4ufgC76m8cj805jrghScewJC2ZDsb9U0b4BIrba+xt/Uytyd0HvQ6+WymSRTfnYj59GQ==", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", + "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", "dev": true, "engines": { - "node": ">= 0.10" + "node": ">=8" } }, "node_modules/define-properties": { @@ -4273,28 +3476,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/define-property": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-2.0.2.tgz", - "integrity": "sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==", - "dev": true, - "dependencies": { - "is-descriptor": "^1.0.2", - "isobject": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/defined": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/defined/-/defined-1.0.1.tgz", - "integrity": "sha512-hsBd2qSVCRE+5PmNdHt1uzyrFu5d3RwmFDKzyNZMFq/EwDNJF7Ee5+D5oEKF0hU6LhtoUF1macFvOe4AskQC1Q==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", @@ -4304,104 +3485,22 @@ "node": ">=0.4.0" } }, - "node_modules/deps-sort": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/deps-sort/-/deps-sort-2.0.1.tgz", - "integrity": 
"sha512-1orqXQr5po+3KI6kQb9A4jnXT1PBwggGl2d7Sq2xsnOeI9GPcE/tGcF9UiSZtZBM7MukY4cAh7MemS6tZYipfw==", - "dev": true, - "dependencies": { - "JSONStream": "^1.0.3", - "shasum-object": "^1.0.0", - "subarg": "^1.0.0", - "through2": "^2.0.0" - }, - "bin": { - "deps-sort": "bin/cmd.js" - } - }, - "node_modules/deps-sort/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dev": true, - "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } - }, - "node_modules/des.js": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/des.js/-/des.js-1.0.1.tgz", - "integrity": "sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA==", - "dependencies": { - "inherits": "^2.0.1", - "minimalistic-assert": "^1.0.0" - } - }, - "node_modules/detect-file": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/detect-file/-/detect-file-1.0.0.tgz", - "integrity": "sha512-DtCOLG98P007x7wiiOmfI0fi3eIKyWiLTGJ2MDnVi/E04lWGbf+JzrRHMm0rgIIZJGtHpKpbVgLWHrv8xXpc3Q==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/detect-newline": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-2.1.0.tgz", - "integrity": "sha512-CwffZFvlJffUg9zZA0uqrjQayUTC8ob94pnr5sFwaVv3IOmkfUHcWH+jXaQK3askE51Cqe8/9Ql/0uXNwqZ8Zg==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/detective": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/detective/-/detective-5.2.1.tgz", - "integrity": "sha512-v9XE1zRnz1wRtgurGu0Bs8uHKFSTdteYZNbIPFVhUZ39L/S79ppMpdmVOZAnoz1jfEFodc48n6MX483Xo3t1yw==", - "dev": true, - "dependencies": { - "acorn-node": "^1.8.2", - "defined": "^1.0.0", - "minimist": "^1.2.6" - }, - "bin": { - "detective": "bin/detective.js" - }, - 
"engines": { - "node": ">=0.8.0" - } - }, "node_modules/diff": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", "dev": true, + "license": "BSD-3-Clause", "engines": { "node": ">=0.3.1" } }, - "node_modules/diffie-hellman": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/diffie-hellman/-/diffie-hellman-5.0.3.tgz", - "integrity": "sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg==", - "dependencies": { - "bn.js": "^4.1.0", - "miller-rabin": "^4.0.0", - "randombytes": "^2.0.0" - } - }, - "node_modules/diffie-hellman/node_modules/bn.js": { - "version": "4.12.0", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", - "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" - }, "node_modules/dir-glob": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", "dev": true, + "license": "MIT", "dependencies": { "path-type": "^4.0.0" }, @@ -4414,6 +3513,7 @@ "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", "dev": true, + "license": "Apache-2.0", "dependencies": { "esutils": "^2.0.2" }, @@ -4421,71 +3521,13 @@ "node": ">=6.0.0" } }, - "node_modules/domain-browser": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/domain-browser/-/domain-browser-1.2.0.tgz", - "integrity": "sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA==", - "dev": true, - "engines": { - "node": ">=0.4", - "npm": ">=1.2" - } - }, - "node_modules/duplexer2": { - "version": "0.1.4", - 
"resolved": "https://registry.npmjs.org/duplexer2/-/duplexer2-0.1.4.tgz", - "integrity": "sha512-asLFVfWWtJ90ZyOUHMqk7/S2w2guQKxUI2itj3d92ADHhxUSbCMGi1f1cBcJ7xM1To+pE/Khbwo1yuNbMEPKeA==", - "dev": true, - "dependencies": { - "readable-stream": "^2.0.2" - } - }, - "node_modules/duplexify": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz", - "integrity": "sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==", - "dev": true, - "dependencies": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.0" - } - }, - "node_modules/duplexify/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dev": true, - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/each-props": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/each-props/-/each-props-1.3.2.tgz", - "integrity": "sha512-vV0Hem3zAGkJAyU7JSjixeU66rwdynTAa1vofCrSA5fEln+m67Az9CcnkVD776/fsN/UjIWmBDoNRS6t6G9RfA==", - "dev": true, - "dependencies": { - "is-plain-object": "^2.0.1", - "object.defaults": "^1.1.0" - } - }, - "node_modules/each-props/node_modules/is-plain-object": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", - "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", + "node_modules/dotenv": { + "version": "16.0.3", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.0.3.tgz", + "integrity": "sha512-7GO6HghkA5fYG9TYnNxi14/7K9f5occMlp3zXAuSxn7CKCxt9xbNWG7yF8hTCSUchlfWSe3uLmlPfigevRItzQ==", "dev": true, - 
"dependencies": { - "isobject": "^3.0.1" - }, "engines": { - "node": ">=0.10.0" + "node": ">=12" } }, "node_modules/eastasianwidth": { @@ -4498,40 +3540,27 @@ "version": "1.4.356", "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.356.tgz", "integrity": "sha512-nEftV1dRX3omlxAj42FwqRZT0i4xd2dIg39sog/CnCJeCcL1TRd2Uh0i9Oebgv8Ou0vzTPw++xc+Z20jzS2B6A==", - "dev": true - }, - "node_modules/elliptic": { - "version": "6.5.4", - "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz", - "integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==", - "dependencies": { - "bn.js": "^4.11.9", - "brorand": "^1.1.0", - "hash.js": "^1.0.0", - "hmac-drbg": "^1.0.1", - "inherits": "^2.0.4", - "minimalistic-assert": "^1.0.1", - "minimalistic-crypto-utils": "^1.0.1" - } - }, - "node_modules/elliptic/node_modules/bn.js": { - "version": "4.12.0", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", - "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" + "dev": true, + "license": "ISC" }, "node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true + "dev": true, + "license": "MIT" }, - "node_modules/end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "node_modules/enhanced-resolve": { + "version": "5.12.0", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.12.0.tgz", + "integrity": "sha512-QHTXI/sZQmko1cbDoNAa3mJ5qhWUUNAq3vR0/YiD379fWQrcfuoX1+HW2S0MTt7XmoPLapdaDKUtelUSPic7hQ==", "dev": true, "dependencies": 
{ - "once": "^1.4.0" + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + }, + "engines": { + "node": ">=10.13.0" } }, "node_modules/error-ex": { @@ -4543,57 +3572,92 @@ "is-arrayish": "^0.2.1" } }, - "node_modules/es5-ext": { - "version": "0.10.62", - "resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.62.tgz", - "integrity": "sha512-BHLqn0klhEpnOKSrzn/Xsz2UIW8j+cGmo9JLzr8BiUapV8hPL9+FliFqjwr9ngW7jWdnxv6eO+/LqyhJVqgrjA==", + "node_modules/es-abstract": { + "version": "1.21.2", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.21.2.tgz", + "integrity": "sha512-y/B5POM2iBnIxCiernH1G7rC9qQoM77lLIMQLuob0zhp8C56Po81+2Nj0WFKnd0pNReDTnkYryc+zhOzpEIROg==", "dev": true, - "hasInstallScript": true, "dependencies": { - "es6-iterator": "^2.0.3", - "es6-symbol": "^3.1.3", - "next-tick": "^1.1.0" + "array-buffer-byte-length": "^1.0.0", + "available-typed-arrays": "^1.0.5", + "call-bind": "^1.0.2", + "es-set-tostringtag": "^2.0.1", + "es-to-primitive": "^1.2.1", + "function.prototype.name": "^1.1.5", + "get-intrinsic": "^1.2.0", + "get-symbol-description": "^1.0.0", + "globalthis": "^1.0.3", + "gopd": "^1.0.1", + "has": "^1.0.3", + "has-property-descriptors": "^1.0.0", + "has-proto": "^1.0.1", + "has-symbols": "^1.0.3", + "internal-slot": "^1.0.5", + "is-array-buffer": "^3.0.2", + "is-callable": "^1.2.7", + "is-negative-zero": "^2.0.2", + "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.2", + "is-string": "^1.0.7", + "is-typed-array": "^1.1.10", + "is-weakref": "^1.0.2", + "object-inspect": "^1.12.3", + "object-keys": "^1.1.1", + "object.assign": "^4.1.4", + "regexp.prototype.flags": "^1.4.3", + "safe-regex-test": "^1.0.0", + "string.prototype.trim": "^1.2.7", + "string.prototype.trimend": "^1.0.6", + "string.prototype.trimstart": "^1.0.6", + "typed-array-length": "^1.0.4", + "unbox-primitive": "^1.0.2", + "which-typed-array": "^1.1.9" }, "engines": { - "node": ">=0.10" + "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" } }, - "node_modules/es6-error": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz", - "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==" - }, - "node_modules/es6-iterator": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/es6-iterator/-/es6-iterator-2.0.3.tgz", - "integrity": "sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g==", + "node_modules/es-set-tostringtag": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz", + "integrity": "sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg==", "dev": true, "dependencies": { - "d": "1", - "es5-ext": "^0.10.35", - "es6-symbol": "^3.1.1" + "get-intrinsic": "^1.1.3", + "has": "^1.0.3", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" } }, - "node_modules/es6-symbol": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/es6-symbol/-/es6-symbol-3.1.3.tgz", - "integrity": "sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA==", + "node_modules/es-shim-unscopables": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", + "integrity": "sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", "dev": true, "dependencies": { - "d": "^1.0.1", - "ext": "^1.1.2" + "has": "^1.0.3" } }, - "node_modules/es6-weak-map": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/es6-weak-map/-/es6-weak-map-2.0.3.tgz", - "integrity": "sha512-p5um32HOTO1kP+w7PRnB+5lQ43Z6muuMuIMffvDN8ZB4GcnjLBV6zGStpbASIMk4DCAvEaamhe2zhyCb/QXXsA==", + "node_modules/es-to-primitive": { + "version": "1.2.1", + "resolved": 
"https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", "dev": true, "dependencies": { - "d": "1", - "es5-ext": "^0.10.46", - "es6-iterator": "^2.0.3", - "es6-symbol": "^3.1.1" + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/escalade": { @@ -4601,6 +3665,7 @@ "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" } @@ -4610,6 +3675,7 @@ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, @@ -4622,6 +3688,7 @@ "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.38.0.tgz", "integrity": "sha512-pIdsD2jwlUGf/U38Jv97t8lq6HpaU/G9NKbYmpWpZGw3LdTNhZLbJePqxOXGB5+JEKfOPU/XLxYxFh03nr1KTg==", "dev": true, + "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.4.0", @@ -4679,6 +3746,7 @@ "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.8.0.tgz", "integrity": "sha512-wLbQiFre3tdGgpDv67NQKnJuTlcUVYHas3k+DZCc2U2BadthoEY4B7hLPvAxaqdyOGCzuLfii2fqGph10va7oA==", "dev": true, + "license": "MIT", "bin": { "eslint-config-prettier": "bin/cli.js" }, @@ -4686,47 +3754,288 @@ "eslint": ">=7.0.0" } }, - "node_modules/eslint-plugin-simple-import-sort": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-simple-import-sort/-/eslint-plugin-simple-import-sort-10.0.0.tgz", - 
"integrity": "sha512-AeTvO9UCMSNzIHRkg8S6c3RPy5YEwKWSQPx3DYghLedo2ZQxowPFLGDN1AZ2evfg6r6mjBSZSLxLFsWSu3acsw==", + "node_modules/eslint-import-resolver-node": { + "version": "0.3.7", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.7.tgz", + "integrity": "sha512-gozW2blMLJCeFpBwugLTGyvVjNoeo1knonXAcatC6bjPBZitotxdWf7Gimr25N4c0AAOo4eOUfaG82IJPDpqCA==", "dev": true, - "peerDependencies": { - "eslint": ">=5.0.0" + "dependencies": { + "debug": "^3.2.7", + "is-core-module": "^2.11.0", + "resolve": "^1.22.1" } }, - "node_modules/eslint-scope": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz", - "integrity": "sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==", + "node_modules/eslint-import-resolver-node/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", "dev": true, "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^5.2.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "ms": "^2.1.1" } }, - "node_modules/eslint-utils": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz", - "integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==", + "node_modules/eslint-import-resolver-typescript": { + "version": "3.5.5", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-3.5.5.tgz", + "integrity": "sha512-TdJqPHs2lW5J9Zpe17DZNQuDnox4xo2o+0tE7Pggain9Rbc19ik8kFtXdxZ250FVx2kF4vlt2RSf4qlUpG7bhw==", "dev": true, "dependencies": { - "eslint-visitor-keys": "^1.1.0" + "debug": "^4.3.4", + "enhanced-resolve": "^5.12.0", + "eslint-module-utils": "^2.7.4", + "get-tsconfig": 
"^4.5.0", + "globby": "^13.1.3", + "is-core-module": "^2.11.0", + "is-glob": "^4.0.3", + "synckit": "^0.8.5" }, "engines": { - "node": ">=6" + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts/projects/eslint-import-resolver-ts" + }, + "peerDependencies": { + "eslint": "*", + "eslint-plugin-import": "*" } }, - "node_modules/eslint-utils/node_modules/eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", + "node_modules/eslint-import-resolver-typescript/node_modules/globby": { + "version": "13.1.4", + "resolved": "https://registry.npmjs.org/globby/-/globby-13.1.4.tgz", + "integrity": "sha512-iui/IiiW+QrJ1X1hKH5qwlMQyv34wJAYwH1vrf8b9kBA4sNiif3gKsMHa+BrdnOpEudWjpotfa7LrTzB1ERS/g==", + "dev": true, + "dependencies": { + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.11", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint-import-resolver-typescript/node_modules/slash": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz", + "integrity": "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint-module-utils": { + "version": "2.7.4", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz", + "integrity": "sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA==", + "dev": true, + "dependencies": { + "debug": "^3.2.7" + }, + "engines": { + "node": ">=4" + 
}, + "peerDependenciesMeta": { + "eslint": { + "optional": true + } + } + }, + "node_modules/eslint-module-utils/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-plugin-import": { + "version": "2.27.5", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.27.5.tgz", + "integrity": "sha512-LmEt3GVofgiGuiE+ORpnvP+kAm3h6MLZJ4Q5HCyHADofsb4VzXFsRiWj3c0OFiV+3DWFh0qg3v9gcPlfc3zRow==", "dev": true, + "dependencies": { + "array-includes": "^3.1.6", + "array.prototype.flat": "^1.3.1", + "array.prototype.flatmap": "^1.3.1", + "debug": "^3.2.7", + "doctrine": "^2.1.0", + "eslint-import-resolver-node": "^0.3.7", + "eslint-module-utils": "^2.7.4", + "has": "^1.0.3", + "is-core-module": "^2.11.0", + "is-glob": "^4.0.3", + "minimatch": "^3.1.2", + "object.values": "^1.1.6", + "resolve": "^1.22.1", + "semver": "^6.3.0", + "tsconfig-paths": "^3.14.1" + }, "engines": { "node": ">=4" + }, + "peerDependencies": { + "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8" + } + }, + "node_modules/eslint-plugin-import/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-plugin-import/node_modules/doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "dev": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/eslint-plugin-simple-import-sort": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-simple-import-sort/-/eslint-plugin-simple-import-sort-10.0.0.tgz", + "integrity": "sha512-AeTvO9UCMSNzIHRkg8S6c3RPy5YEwKWSQPx3DYghLedo2ZQxowPFLGDN1AZ2evfg6r6mjBSZSLxLFsWSu3acsw==", + "dev": true, + "peerDependencies": { + "eslint": ">=5.0.0" + } + }, + "node_modules/eslint-plugin-unicorn": { + "version": "46.0.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-46.0.0.tgz", + "integrity": "sha512-j07WkC+PFZwk8J33LYp6JMoHa1lXc1u6R45pbSAipjpfpb7KIGr17VE2D685zCxR5VL4cjrl65kTJflziQWMDA==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.19.1", + "@eslint-community/eslint-utils": "^4.1.2", + "ci-info": "^3.6.1", + "clean-regexp": "^1.0.0", + "esquery": "^1.4.0", + "indent-string": "^4.0.0", + "is-builtin-module": "^3.2.0", + "jsesc": "^3.0.2", + "lodash": "^4.17.21", + "pluralize": "^8.0.0", + "read-pkg-up": "^7.0.1", + "regexp-tree": "^0.1.24", + "regjsparser": "^0.9.1", + "safe-regex": "^2.1.1", + "semver": "^7.3.8", + "strip-indent": "^3.0.0" + }, + "engines": { + "node": ">=14.18" + }, + "funding": { + "url": "https://github.com/sindresorhus/eslint-plugin-unicorn?sponsor=1" + }, + "peerDependencies": { + "eslint": ">=8.28.0" + } + }, + "node_modules/eslint-plugin-unicorn/node_modules/jsesc": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.0.2.tgz", + "integrity": "sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g==", + "dev": true, + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/eslint-plugin-unicorn/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": 
true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/eslint-plugin-unicorn/node_modules/semver": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.4.0.tgz", + "integrity": "sha512-RgOxM8Mw+7Zus0+zcLEUn8+JfoLpj/huFTItQy2hsM4khuC1HYRDp0cU482Ewn/Fcy6bCjufD8vAj7voC66KQw==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/eslint-plugin-unicorn/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/eslint-plugin-unused-imports": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-unused-imports/-/eslint-plugin-unused-imports-2.0.0.tgz", + "integrity": "sha512-3APeS/tQlTrFa167ThtP0Zm0vctjr4M44HMpeg1P4bK6wItarumq0Ma82xorMKdFsWpphQBlRPzw/pxiVELX1A==", + "dev": true, + "dependencies": { + "eslint-rule-composer": "^0.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "@typescript-eslint/eslint-plugin": "^5.0.0", + "eslint": "^8.0.0" + }, + "peerDependenciesMeta": { + "@typescript-eslint/eslint-plugin": { + "optional": true + } + } + }, + "node_modules/eslint-rule-composer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/eslint-rule-composer/-/eslint-rule-composer-0.3.0.tgz", + "integrity": "sha512-bt+Sh8CtDmn2OajxvNO+BX7Wn4CIWMpTRm3MaiKPCQcnnlm0CS2mhui6QaoeQugs+3Kj2ESKEEGJUdVafwhiCg==", + "dev": true, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/eslint-scope": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz", + "integrity": 
"sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, "node_modules/eslint-visitor-keys": { @@ -4734,6 +4043,7 @@ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.0.tgz", "integrity": "sha512-HPpKPUBQcAsZOsHAFwTtIKcYlCje62XB7SEAcxjtmW6TD1WVpkS6i6/hOVtTZIl4zGj/mBqpFVGvaDneik+VoQ==", "dev": true, + "license": "Apache-2.0", "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, @@ -4746,6 +4056,7 @@ "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "dev": true, + "license": "ISC", "dependencies": { "is-glob": "^4.0.3" }, @@ -4758,6 +4069,7 @@ "resolved": "https://registry.npmjs.org/espree/-/espree-9.5.1.tgz", "integrity": "sha512-5yxtHSZXRSW5pvv3hAlXM5+/Oswi1AUFqBmbibKb5s6bp3rGIDkyXU6xCoyuuLhijr4SFwPrXRoZjz0AZDN9tg==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "acorn": "^8.8.0", "acorn-jsx": "^5.3.2", @@ -4770,24 +4082,12 @@ "url": "https://opencollective.com/eslint" } }, - "node_modules/esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "dev": true, - "bin": { - "esparse": "bin/esparse.js", - "esvalidate": "bin/esvalidate.js" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/esquery": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { "estraverse": "^5.1.0" }, 
@@ -4800,6 +4100,7 @@ "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "estraverse": "^5.2.0" }, @@ -4812,6 +4113,7 @@ "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", "dev": true, + "license": "BSD-2-Clause", "engines": { "node": ">=4.0" } @@ -4821,38 +4123,11 @@ "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", "dev": true, + "license": "BSD-2-Clause", "engines": { "node": ">=0.10.0" } }, - "node_modules/event-emitter": { - "version": "0.3.5", - "resolved": "https://registry.npmjs.org/event-emitter/-/event-emitter-0.3.5.tgz", - "integrity": "sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA==", - "dev": true, - "dependencies": { - "d": "1", - "es5-ext": "~0.10.14" - } - }, - "node_modules/events": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/events/-/events-2.1.0.tgz", - "integrity": "sha512-3Zmiobend8P9DjmKAty0Era4jV8oJ0yGYe2nJJAxgymF9+N8F2m0hhZiMoWtcfepExzNKZumFU3ksdQbInGWCg==", - "dev": true, - "engines": { - "node": ">=0.4.x" - } - }, - "node_modules/evp_bytestokey": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", - "integrity": "sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==", - "dependencies": { - "md5.js": "^1.3.4", - "safe-buffer": "^5.1.1" - } - }, "node_modules/execa": { "version": "7.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-7.1.1.tgz", @@ -4876,332 +4151,391 @@ "url": 
"https://github.com/sindresorhus/execa?sponsor=1" } }, - "node_modules/expand-brackets": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-2.1.4.tgz", - "integrity": "sha512-w/ozOKR9Obk3qoWeY/WDi6MFta9AoMR+zud60mdnbniMcBxRuFJyDt2LdX/14A1UABeqk+Uk+LDfUpvoGKppZA==", + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-glob": { + "version": "3.2.12", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", + "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", "dev": true, + "license": "MIT", "dependencies": { - "debug": "^2.3.3", - "define-property": "^0.2.5", - "extend-shallow": "^2.0.1", - "posix-character-classes": "^0.1.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" }, "engines": { - "node": ">=0.10.0" + "node": ">=8.6.0" } }, - "node_modules/expand-brackets/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": 
"https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", "dev": true, + "license": "MIT" + }, + "node_modules/fast-xml-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.0.tgz", + "integrity": "sha512-+zVQv4aVTO+o8oRUyRL7PjgeVo1J6oP8Cw2+a8UTZQcj5V0yUK5T63gTN0ldgiHDPghUjKc4OpT6SwMTwnOQug==", + "license": "MIT", "dependencies": { - "ms": "2.0.0" + "strnum": "^1.0.5" + }, + "bin": { + "fxparser": "src/cli/cli.js" + }, + "funding": { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" } }, - "node_modules/expand-brackets/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha512-Rr7ADjQZenceVOAKop6ALkkRAmH1A4Gx9hV/7ZujPUN2rkATqFO0JZLZInbAjpZYoJ1gUx8MRMQVkYemcbMSTA==", + "node_modules/fastq": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz", + "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==", "dev": true, + "license": "ISC", "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" + "reusify": "^1.0.4" } }, - "node_modules/expand-brackets/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", "dev": true, + "license": "MIT", "dependencies": { - "is-extendable": 
"^0.1.0" + "flat-cache": "^3.0.4" }, "engines": { - "node": ">=0.10.0" + "node": "^10.12.0 || >=12.0.0" } }, - "node_modules/expand-brackets/node_modules/is-accessor-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", - "integrity": "sha512-e1BM1qnDbMRG3ll2U9dSK0UMHuWOs3pY3AtcFsmvwPtKL3MML/Q86i+GilLfvqEs4GW+ExB91tQ3Ig9noDIZ+A==", + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", "dev": true, + "license": "MIT", "dependencies": { - "kind-of": "^3.0.2" + "to-regex-range": "^5.0.1" }, + "engines": { + "node": ">=8" + } + }, + "node_modules/filter-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/filter-obj/-/filter-obj-1.1.0.tgz", + "integrity": "sha512-8rXg1ZnX7xzy2NGDVkBVaAy+lSlPNwad13BtgSlLuxfIslyt5Vg64U7tFcCt4WS1R0hvtnQybT/IyCkGZ3DpXQ==", + "license": "MIT", "engines": { "node": ">=0.10.0" } }, - "node_modules/expand-brackets/node_modules/is-accessor-descriptor/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "node_modules/find-cache-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", + "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", "dev": true, "dependencies": { - "is-buffer": "^1.1.5" + "commondir": "^1.0.1", + "make-dir": "^2.0.0", + "pkg-dir": "^3.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=6" } }, - "node_modules/expand-brackets/node_modules/is-data-descriptor": { - "version": "0.1.4", - "resolved": 
"https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", - "integrity": "sha512-+w9D5ulSoBNlmw9OHn3U2v51SyoCd0he+bB3xMl62oijhrspxowjU+AIcDY0N3iEJbUEkB15IlMASQsxYigvXg==", + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "dev": true, + "license": "MIT", "dependencies": { - "kind-of": "^3.0.2" + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true, + "license": "BSD-3-Clause", + "bin": { + "flat": "cli.js" } }, - "node_modules/expand-brackets/node_modules/is-data-descriptor/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "node_modules/flat-cache": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", + "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", "dev": true, + "license": "MIT", "dependencies": { - "is-buffer": "^1.1.5" + "flatted": "^3.1.0", + "rimraf": "^3.0.2" }, "engines": { - "node": ">=0.10.0" + "node": "^10.12.0 || >=12.0.0" } }, - "node_modules/expand-brackets/node_modules/is-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.6.tgz", - "integrity": 
"sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==", + "node_modules/flat-cache/node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", "dev": true, "dependencies": { - "is-accessor-descriptor": "^0.1.6", - "is-data-descriptor": "^0.1.4", - "kind-of": "^5.0.0" + "glob": "^7.1.3" }, - "engines": { - "node": ">=0.10.0" + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/expand-brackets/node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", + "node_modules/flatted": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz", + "integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==", "dev": true, - "engines": { - "node": ">=0.10.0" - } + "license": "ISC" }, - "node_modules/expand-brackets/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true + "node_modules/for-each": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", + "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", + "license": "MIT", + "dependencies": { + "is-callable": "^1.1.3" + } }, - "node_modules/expand-tilde": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/expand-tilde/-/expand-tilde-2.0.2.tgz", - "integrity": 
"sha512-A5EmesHW6rfnZ9ysHQjPdJRni0SRar0tjtG5MNtm9n5TUvsYU8oozprtRD4AqHxcZWWlVuAmQo2nWKfN9oyjTw==", + "node_modules/form-data": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", + "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", "dev": true, "dependencies": { - "homedir-polyfill": "^1.0.1" + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" }, "engines": { - "node": ">=0.10.0" + "node": ">= 6" } }, - "node_modules/ext": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/ext/-/ext-1.7.0.tgz", - "integrity": "sha512-6hxeJYaL110a9b5TEJSj0gojyHQAmA2ch5Os+ySCiA1QGdS697XWY1pzsrSjqA9LDEEgdB/KypIlR59RcLuHYw==", + "node_modules/formidable": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-1.2.6.tgz", + "integrity": "sha512-KcpbcpuLNOwrEjnbpMC0gS+X8ciDoZE1kkqzat4a8vrprf+s9pKNQ/QIwWfbfs4ltgmFl3MD177SNTkve3BwGQ==", + "deprecated": "Please upgrade to latest, formidable@v2 or formidable@v3! 
Check these notes: https://bit.ly/2ZEqIau", "dev": true, - "dependencies": { - "type": "^2.7.2" + "funding": { + "url": "https://ko-fi.com/tunnckoCore/commissions" } }, - "node_modules/ext/node_modules/type": { - "version": "2.7.2", - "resolved": "https://registry.npmjs.org/type/-/type-2.7.2.tgz", - "integrity": "sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw==", - "dev": true - }, - "node_modules/extend": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", - "dev": true + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true, + "license": "ISC" }, - "node_modules/extend-shallow": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz", - "integrity": "sha512-BwY5b5Ql4+qZoefgMj2NUmx+tehVTH/Kf4k1ZEtOHNFcm2wSxMRo992l6X3TIgni2eZVTZ85xMOjF31fwZAj6Q==", + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", "dev": true, - "dependencies": { - "assign-symbols": "^1.0.0", - "is-extendable": "^1.0.1" - }, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], "engines": { - "node": ">=0.10.0" + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" } }, - "node_modules/external-editor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", - "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", + 
"node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "license": "MIT" + }, + "node_modules/function.prototype.name": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", + "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", "dev": true, "dependencies": { - "chardet": "^0.7.0", - "iconv-lite": "^0.4.24", - "tmp": "^0.0.33" + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.0", + "functions-have-names": "^1.2.2" }, "engines": { - "node": ">=4" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/extglob": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/extglob/-/extglob-2.0.4.tgz", - "integrity": "sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==", + "node_modules/functions-have-names": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", "dev": true, - "dependencies": { - "array-unique": "^0.3.2", - "define-property": "^1.0.0", - "expand-brackets": "^2.1.4", - "extend-shallow": "^2.0.1", - "fragment-cache": "^0.2.1", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/extglob/node_modules/define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": 
"sha512-cZTYKFWspt9jZsMscWo8sc/5lbPC9Q0N5nBLgb+Yd915iL3udB1uFgS3B8YCx66UVHq018DAVFoee7x+gxggeA==", + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", "dev": true, - "dependencies": { - "is-descriptor": "^1.0.0" - }, + "license": "MIT", "engines": { - "node": ">=0.10.0" + "node": ">=6.9.0" } }, - "node_modules/extglob/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, + "license": "ISC", "engines": { - "node": ">=0.10.0" + "node": "6.* || 8.* || >= 10.*" } }, - "node_modules/extglob/node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", + "node_modules/get-func-name": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "integrity": "sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig==", "dev": true, + "license": "MIT", "engines": { - "node": ">=0.10.0" + "node": "*" } }, - "node_modules/fancy-log": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/fancy-log/-/fancy-log-1.3.3.tgz", - "integrity": 
"sha512-k9oEhlyc0FrVh25qYuSELjr8oxsCoc4/LEZfg2iJJrfEk/tZL9bCoJE47gqAvI2m/AUjluCS4+3I0eTx8n3AEw==", - "dev": true, + "node_modules/get-intrinsic": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.0.tgz", + "integrity": "sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==", + "license": "MIT", "dependencies": { - "ansi-gray": "^0.1.1", - "color-support": "^1.1.3", - "parse-node-version": "^1.0.0", - "time-stamp": "^1.0.0" + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.3" }, - "engines": { - "node": ">= 0.10" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } }, - "node_modules/fast-glob": { - "version": "3.2.12", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", - "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", + "node_modules/get-symbol-description": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", + "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", "dev": true, "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - 
"merge2": "^1.3.0", - "micromatch": "^4.0.4" + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.1" }, "engines": { - "node": ">=8.6.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true - }, - "node_modules/fast-levenshtein": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-1.1.4.tgz", - "integrity": "sha512-Ia0sQNrMPXXkqVFt6w6M1n1oKo3NfKs+mvaV811Jwir7vAk9a6PVV9VPYf6X3BU97QiLEmuW3uXH9u87zDFfdw==", - "dev": true - }, - "node_modules/fast-safe-stringify": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", - "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", - "dev": true + "node_modules/get-tsconfig": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.5.0.tgz", + "integrity": "sha512-MjhiaIWCJ1sAU4pIQ5i5OfOuHHxVo1oYeNsWTON7jxYkod8pHocXeh+SSbmu5OZZZK73B6cbJ2XADzXehLyovQ==", + "dev": true, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } }, - "node_modules/fast-xml-parser": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.0.tgz", - "integrity": "sha512-+zVQv4aVTO+o8oRUyRL7PjgeVo1J6oP8Cw2+a8UTZQcj5V0yUK5T63gTN0ldgiHDPghUjKc4OpT6SwMTwnOQug==", + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, "dependencies": { - "strnum": "^1.0.5" 
+ "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" }, - "bin": { - "fxparser": "src/cli/cli.js" + "engines": { + "node": "*" }, "funding": { - "type": "paypal", - "url": "https://paypal.me/naturalintelligence" + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/fastq": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz", - "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==", + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "dev": true, + "license": "ISC", "dependencies": { - "reusify": "^1.0.4" + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" } }, - "node_modules/figures": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", - "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", + "node_modules/globals": { + "version": "13.20.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.20.0.tgz", + "integrity": "sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==", "dev": true, + "license": "MIT", "dependencies": { - "escape-string-regexp": "^1.0.5" + "type-fest": "^0.20.2" }, "engines": { "node": ">=8" @@ -5210,1991 +4544,94 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/figures/node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "dev": true, - "engines": { - 
"node": ">=0.8.0" - } - }, - "node_modules/file-entry-cache": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", - "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "node_modules/globalthis": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", + "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", "dev": true, "dependencies": { - "flat-cache": "^3.0.4" + "define-properties": "^1.1.3" }, "engines": { - "node": "^10.12.0 || >=12.0.0" - } - }, - "node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/filter-obj": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/filter-obj/-/filter-obj-1.1.0.tgz", - "integrity": "sha512-8rXg1ZnX7xzy2NGDVkBVaAy+lSlPNwad13BtgSlLuxfIslyt5Vg64U7tFcCt4WS1R0hvtnQybT/IyCkGZ3DpXQ==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/find-up": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", - "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", - "dev": true, - "dependencies": { - "locate-path": "^6.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/findup-sync": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/findup-sync/-/findup-sync-3.0.0.tgz", - "integrity": 
"sha512-YbffarhcicEhOrm4CtrwdKBdCuz576RLdhJDsIfvNtxUuhdRet1qZcsMjqbePtAseKdAnDyM/IyXbu7PRPRLYg==", - "dev": true, - "dependencies": { - "detect-file": "^1.0.0", - "is-glob": "^4.0.0", - "micromatch": "^3.0.4", - "resolve-dir": "^1.0.1" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/findup-sync/node_modules/braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", - "dev": true, - "dependencies": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/findup-sync/node_modules/braces/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/findup-sync/node_modules/fill-range": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", - "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", - "dev": true, - "dependencies": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/findup-sync/node_modules/fill-range/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": 
"sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/findup-sync/node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/findup-sync/node_modules/is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/findup-sync/node_modules/is-number/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/findup-sync/node_modules/kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/findup-sync/node_modules/micromatch": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", - "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", - "dev": true, - "dependencies": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - 
"braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/findup-sync/node_modules/to-regex-range": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", - "dev": true, - "dependencies": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/fined": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/fined/-/fined-1.2.0.tgz", - "integrity": "sha512-ZYDqPLGxDkDhDZBjZBb+oD1+j0rA4E0pXY50eplAAOPg2N/gUBSSk5IM1/QhPfyVo19lJ+CvXpqfvk+b2p/8Ng==", - "dev": true, - "dependencies": { - "expand-tilde": "^2.0.2", - "is-plain-object": "^2.0.3", - "object.defaults": "^1.1.0", - "object.pick": "^1.2.0", - "parse-filepath": "^1.0.1" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/fined/node_modules/is-plain-object": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", - "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", - "dev": true, - "dependencies": { - "isobject": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/flagged-respawn": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/flagged-respawn/-/flagged-respawn-1.0.1.tgz", - "integrity": "sha512-lNaHNVymajmk0OJMBn8fVUAU1BtDeKIqKoVhk4xAALB57aALg6b4W0MfJ/cUE0g9YBXy5XhSlPIpYIJ7HaY/3Q==", - "dev": true, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/flat": { - "version": "5.0.2", - "resolved": 
"https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", - "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", - "dev": true, - "bin": { - "flat": "cli.js" - } - }, - "node_modules/flat-cache": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", - "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", - "dev": true, - "dependencies": { - "flatted": "^3.1.0", - "rimraf": "^3.0.2" - }, - "engines": { - "node": "^10.12.0 || >=12.0.0" - } - }, - "node_modules/flatted": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz", - "integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==", - "dev": true - }, - "node_modules/flush-write-stream": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/flush-write-stream/-/flush-write-stream-1.1.1.tgz", - "integrity": "sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w==", - "dev": true, - "dependencies": { - "inherits": "^2.0.3", - "readable-stream": "^2.3.6" - } - }, - "node_modules/for-each": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", - "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", - "dependencies": { - "is-callable": "^1.1.3" - } - }, - "node_modules/for-in": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", - "integrity": "sha512-7EwmXrOjyL+ChxMhmG5lnW9MPt1aIeZEwKhQzoBUdTV0N3zuwWDZYVJatDvZ2OyzPUvdIAZDsCetk3coyMfcnQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/for-own": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/for-own/-/for-own-1.0.0.tgz", - "integrity": 
"sha512-0OABksIGrxKK8K4kynWkQ7y1zounQxP+CWnyclVwj81KW3vlLlGUx57DKGcP/LH216GzqnstnPocF16Nxs0Ycg==", - "dev": true, - "dependencies": { - "for-in": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/fork-stream": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/fork-stream/-/fork-stream-0.0.4.tgz", - "integrity": "sha512-Pqq5NnT78ehvUnAk/We/Jr22vSvanRlFTpAmQ88xBY/M1TlHe+P0ILuEyXS595ysdGfaj22634LBkGMA2GTcpA==", - "dev": true - }, - "node_modules/form-data": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", - "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", - "dev": true, - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "mime-types": "^2.1.12" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/formidable": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/formidable/-/formidable-1.2.6.tgz", - "integrity": "sha512-KcpbcpuLNOwrEjnbpMC0gS+X8ciDoZE1kkqzat4a8vrprf+s9pKNQ/QIwWfbfs4ltgmFl3MD177SNTkve3BwGQ==", - "deprecated": "Please upgrade to latest, formidable@v2 or formidable@v3! 
Check these notes: https://bit.ly/2ZEqIau", - "dev": true, - "funding": { - "url": "https://ko-fi.com/tunnckoCore/commissions" - } - }, - "node_modules/fragment-cache": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/fragment-cache/-/fragment-cache-0.2.1.tgz", - "integrity": "sha512-GMBAbW9antB8iZRHLoGw0b3HANt57diZYFO/HL1JGIC1MjKrdmhxvrJbupnVvpys0zsz7yBApXdQyfepKly2kA==", - "dev": true, - "dependencies": { - "map-cache": "^0.2.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/fs-mkdirp-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs-mkdirp-stream/-/fs-mkdirp-stream-1.0.0.tgz", - "integrity": "sha512-+vSd9frUnapVC2RZYfL3FCB2p3g4TBhaUmrsWlSudsGdnxIuUvBB2QM1VZeBtc49QFwrp+wQLrDs3+xxDgI5gQ==", - "dev": true, - "dependencies": { - "graceful-fs": "^4.1.11", - "through2": "^2.0.3" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/fs-mkdirp-stream/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dev": true, - "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", - "dev": true - }, - "node_modules/function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" - }, - "node_modules/functional-red-black-tree": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", - "integrity": 
"sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==", - "dev": true - }, - "node_modules/gensync": { - "version": "1.0.0-beta.2", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", - "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", - "dev": true, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/get-assigned-identifiers": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/get-assigned-identifiers/-/get-assigned-identifiers-1.2.0.tgz", - "integrity": "sha512-mBBwmeGTrxEMO4pMaaf/uUEFHnYtwr8FTe8Y/mer4rcV/bye0qGm6pw1bGZFGStxC5O76c5ZAVBGnqHmOaJpdQ==", - "dev": true - }, - "node_modules/get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", - "dev": true, - "engines": { - "node": "6.* || 8.* || >= 10.*" - } - }, - "node_modules/get-func-name": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", - "integrity": "sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig==", - "dev": true, - "engines": { - "node": "*" - } - }, - "node_modules/get-intrinsic": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.0.tgz", - "integrity": "sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==", - "dependencies": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": 
"sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/get-value": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz", - "integrity": "sha512-Ln0UQDlxH1BapMu3GPtf7CuYNwRZf2gwCuPqbyG6pB8WfmFpzqcy4xtAaAMUhnNqjMKTiCPZG2oMT3YSx8U2NA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "dev": true, - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/glob-stream": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/glob-stream/-/glob-stream-6.1.0.tgz", - "integrity": "sha512-uMbLGAP3S2aDOHUDfdoYcdIePUCfysbAd0IAoWVZbeGU/oNQ8asHVSshLDJUPWxfzj8zsCG7/XeHPHTtow0nsw==", - "dev": true, - "dependencies": { - "extend": "^3.0.0", - "glob": "^7.1.1", - "glob-parent": "^3.1.0", - "is-negated-glob": "^1.0.0", - "ordered-read-streams": "^1.0.0", - "pumpify": "^1.3.5", - "readable-stream": "^2.1.5", - "remove-trailing-separator": "^1.0.1", - "to-absolute-glob": "^2.0.0", - "unique-stream": "^2.0.2" - }, - "engines": { - 
"node": ">= 0.10" - } - }, - "node_modules/glob-stream/node_modules/glob-parent": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", - "integrity": "sha512-E8Ak/2+dZY6fnzlR7+ueWvhsH1SjHr4jjss4YS/h4py44jY9MhK/VFdaZJAWDz6BbL21KeteKxFSFpq8OS5gVA==", - "dev": true, - "dependencies": { - "is-glob": "^3.1.0", - "path-dirname": "^1.0.0" - } - }, - "node_modules/glob-stream/node_modules/is-glob": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", - "integrity": "sha512-UFpDDrPgM6qpnFNI+rh/p3bUaq9hKLZN8bMUWzxmcnZVS3omf4IPK+BrewlnWjO1WmUsMYuSjKh4UJuV4+Lqmw==", - "dev": true, - "dependencies": { - "is-extglob": "^2.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/glob-watcher": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/glob-watcher/-/glob-watcher-5.0.5.tgz", - "integrity": "sha512-zOZgGGEHPklZNjZQaZ9f41i7F2YwE+tS5ZHrDhbBCk3stwahn5vQxnFmBJZHoYdusR6R1bLSXeGUy/BhctwKzw==", - "dev": true, - "dependencies": { - "anymatch": "^2.0.0", - "async-done": "^1.2.0", - "chokidar": "^2.0.0", - "is-negated-glob": "^1.0.0", - "just-debounce": "^1.0.0", - "normalize-path": "^3.0.0", - "object.defaults": "^1.1.0" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/glob-watcher/node_modules/binary-extensions": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.13.1.tgz", - "integrity": "sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/glob-watcher/node_modules/braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", - "dev": true, - "dependencies": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - 
"extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/glob-watcher/node_modules/chokidar": { - "version": "2.1.8", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", - "integrity": "sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==", - "deprecated": "Chokidar 2 does not receive security updates since 2019. Upgrade to chokidar 3 with 15x fewer dependencies", - "dev": true, - "dependencies": { - "anymatch": "^2.0.0", - "async-each": "^1.0.1", - "braces": "^2.3.2", - "glob-parent": "^3.1.0", - "inherits": "^2.0.3", - "is-binary-path": "^1.0.0", - "is-glob": "^4.0.0", - "normalize-path": "^3.0.0", - "path-is-absolute": "^1.0.0", - "readdirp": "^2.2.1", - "upath": "^1.1.1" - }, - "optionalDependencies": { - "fsevents": "^1.2.7" - } - }, - "node_modules/glob-watcher/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/glob-watcher/node_modules/fill-range": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", - "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", - "dev": true, - "dependencies": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/glob-watcher/node_modules/glob-parent": { - "version": "3.1.0", - "resolved": 
"https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", - "integrity": "sha512-E8Ak/2+dZY6fnzlR7+ueWvhsH1SjHr4jjss4YS/h4py44jY9MhK/VFdaZJAWDz6BbL21KeteKxFSFpq8OS5gVA==", - "dev": true, - "dependencies": { - "is-glob": "^3.1.0", - "path-dirname": "^1.0.0" - } - }, - "node_modules/glob-watcher/node_modules/glob-parent/node_modules/is-glob": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", - "integrity": "sha512-UFpDDrPgM6qpnFNI+rh/p3bUaq9hKLZN8bMUWzxmcnZVS3omf4IPK+BrewlnWjO1WmUsMYuSjKh4UJuV4+Lqmw==", - "dev": true, - "dependencies": { - "is-extglob": "^2.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/glob-watcher/node_modules/is-binary-path": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz", - "integrity": "sha512-9fRVlXc0uCxEDj1nQzaWONSpbTfx0FmJfzHF7pwlI8DkWGoHBBea4Pg5Ky0ojwwxQmnSifgbKkI06Qv0Ljgj+Q==", - "dev": true, - "dependencies": { - "binary-extensions": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/glob-watcher/node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/glob-watcher/node_modules/is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/glob-watcher/node_modules/is-plain-object": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", - "integrity": 
"sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", - "dev": true, - "dependencies": { - "isobject": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/glob-watcher/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/glob-watcher/node_modules/micromatch": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", - "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", - "dev": true, - "dependencies": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/glob-watcher/node_modules/micromatch/node_modules/extend-shallow": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz", - "integrity": "sha512-BwY5b5Ql4+qZoefgMj2NUmx+tehVTH/Kf4k1ZEtOHNFcm2wSxMRo992l6X3TIgni2eZVTZ85xMOjF31fwZAj6Q==", - "dev": true, - "dependencies": { - "assign-symbols": "^1.0.0", - "is-extendable": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/glob-watcher/node_modules/micromatch/node_modules/is-extendable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": 
"sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", - "dev": true, - "dependencies": { - "is-plain-object": "^2.0.4" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/glob-watcher/node_modules/micromatch/node_modules/kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/glob-watcher/node_modules/readdirp": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz", - "integrity": "sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==", - "dev": true, - "dependencies": { - "graceful-fs": "^4.1.11", - "micromatch": "^3.1.10", - "readable-stream": "^2.0.2" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/glob-watcher/node_modules/to-regex-range": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", - "dev": true, - "dependencies": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/global-modules": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/global-modules/-/global-modules-1.0.0.tgz", - "integrity": "sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg==", - "dev": true, - "dependencies": { - "global-prefix": "^1.0.1", - "is-windows": "^1.0.1", - "resolve-dir": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/global-prefix": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/global-prefix/-/global-prefix-1.0.2.tgz", - "integrity": 
"sha512-5lsx1NUDHtSjfg0eHlmYvZKv8/nVqX4ckFbM+FrGcQ+04KWcWFo9P5MxPZYSzUvyzmdTbI7Eix8Q4IbELDqzKg==", - "dev": true, - "dependencies": { - "expand-tilde": "^2.0.2", - "homedir-polyfill": "^1.0.1", - "ini": "^1.3.4", - "is-windows": "^1.0.1", - "which": "^1.2.14" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/global-prefix/node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "which": "bin/which" - } - }, - "node_modules/globals": { - "version": "13.20.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.20.0.tgz", - "integrity": "sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==", - "dev": true, - "dependencies": { - "type-fest": "^0.20.2" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/globby": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", - "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", - "dev": true, - "dependencies": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/glogg": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/glogg/-/glogg-1.0.2.tgz", - "integrity": "sha512-5mwUoSuBk44Y4EshyiqcH95ZntbDdTQqA3QYSrxmzj28Ai0vXBGMH1ApSANH14j2sIRtqCEyg6PfsuP7ElOEDA==", - "dev": true, - "dependencies": { - "sparkles": "^1.0.0" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/gopd": { - "version": "1.0.1", 
- "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", - "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", - "dependencies": { - "get-intrinsic": "^1.1.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.11", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", - "dev": true - }, - "node_modules/grapheme-splitter": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", - "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==", - "dev": true - }, - "node_modules/growl": { - "version": "1.10.5", - "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", - "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", - "dev": true, - "engines": { - "node": ">=4.x" - } - }, - "node_modules/gulp": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/gulp/-/gulp-4.0.2.tgz", - "integrity": "sha512-dvEs27SCZt2ibF29xYgmnwwCYZxdxhQ/+LFWlbAW8y7jt68L/65402Lz3+CKy0Ov4rOs+NERmDq7YlZaDqUIfA==", - "dev": true, - "dependencies": { - "glob-watcher": "^5.0.3", - "gulp-cli": "^2.2.0", - "undertaker": "^1.2.1", - "vinyl-fs": "^3.0.0" - }, - "bin": { - "gulp": "bin/gulp.js" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/gulp-babel": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/gulp-babel/-/gulp-babel-8.0.0.tgz", - "integrity": "sha512-oomaIqDXxFkg7lbpBou/gnUkX51/Y/M2ZfSjL2hdqXTAlSWZcgZtd2o0cOH0r/eE8LWD0+Q/PsLsr2DKOoqToQ==", - "dev": true, - "dependencies": { - "plugin-error": "^1.0.1", - "replace-ext": "^1.0.0", - "through2": "^2.0.0", - "vinyl-sourcemaps-apply": "^0.2.0" - }, 
- "engines": { - "node": ">=6" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/gulp-babel/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dev": true, - "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } - }, - "node_modules/gulp-cli": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/gulp-cli/-/gulp-cli-2.3.0.tgz", - "integrity": "sha512-zzGBl5fHo0EKSXsHzjspp3y5CONegCm8ErO5Qh0UzFzk2y4tMvzLWhoDokADbarfZRL2pGpRp7yt6gfJX4ph7A==", - "dev": true, - "dependencies": { - "ansi-colors": "^1.0.1", - "archy": "^1.0.0", - "array-sort": "^1.0.0", - "color-support": "^1.1.3", - "concat-stream": "^1.6.0", - "copy-props": "^2.0.1", - "fancy-log": "^1.3.2", - "gulplog": "^1.0.0", - "interpret": "^1.4.0", - "isobject": "^3.0.1", - "liftoff": "^3.1.0", - "matchdep": "^2.0.0", - "mute-stdout": "^1.0.0", - "pretty-hrtime": "^1.0.0", - "replace-homedir": "^1.0.0", - "semver-greatest-satisfied-range": "^1.1.0", - "v8flags": "^3.2.0", - "yargs": "^7.1.0" - }, - "bin": { - "gulp": "bin/gulp.js" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/gulp-cli/node_modules/ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/gulp-cli/node_modules/cliui": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-3.2.0.tgz", - "integrity": "sha512-0yayqDxWQbqk3ojkYqUKqaAQ6AfNKeKWRNA8kR0WXzAsdHpP4BIaOmMAG87JGuO6qcobyW4GjxHd9PmhEd+T9w==", - "dev": true, - "dependencies": { - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1", - "wrap-ansi": "^2.0.0" - } - }, - 
"node_modules/gulp-cli/node_modules/decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/gulp-cli/node_modules/get-caller-file": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz", - "integrity": "sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==", - "dev": true - }, - "node_modules/gulp-cli/node_modules/is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha512-1pqUqRjkhPJ9miNq9SwMfdvi6lBJcd6eFxvfaivQhaH3SgisfiuudvFntdKOmxuee/77l+FPjKrQjWvmPjWrRw==", - "dev": true, - "dependencies": { - "number-is-nan": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/gulp-cli/node_modules/string-width": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha512-0XsVpQLnVCXHJfyEs8tC0zpTVIr5PKKsQtkT29IwupnPTjtPmQ3xT/4yCREF9hYkV/3M3kzcUTSAZT6a6h81tw==", - "dev": true, - "dependencies": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/gulp-cli/node_modules/strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg==", - "dev": true, - "dependencies": { - "ansi-regex": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/gulp-cli/node_modules/wrap-ansi": { - "version": "2.1.0", - "resolved": 
"https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", - "integrity": "sha512-vAaEaDM946gbNpH5pLVNR+vX2ht6n0Bt3GXwVB1AuAqZosOvHNF3P7wDnh8KLkSqgUh0uh77le7Owgoz+Z9XBw==", - "dev": true, - "dependencies": { - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/gulp-cli/node_modules/y18n": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-3.2.2.tgz", - "integrity": "sha512-uGZHXkHnhF0XeeAPgnKfPv1bgKAYyVvmNL1xlKsPYZPaIHxGti2hHqvOCQv71XMsLxu1QjergkqogUnms5D3YQ==", - "dev": true - }, - "node_modules/gulp-cli/node_modules/yargs": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-7.1.2.tgz", - "integrity": "sha512-ZEjj/dQYQy0Zx0lgLMLR8QuaqTihnxirir7EwUHp1Axq4e3+k8jXU5K0VLbNvedv1f4EWtBonDIZm0NUr+jCcA==", - "dev": true, - "dependencies": { - "camelcase": "^3.0.0", - "cliui": "^3.2.0", - "decamelize": "^1.1.1", - "get-caller-file": "^1.0.1", - "os-locale": "^1.4.0", - "read-pkg-up": "^1.0.1", - "require-directory": "^2.1.1", - "require-main-filename": "^1.0.1", - "set-blocking": "^2.0.0", - "string-width": "^1.0.2", - "which-module": "^1.0.0", - "y18n": "^3.2.1", - "yargs-parser": "^5.0.1" - } - }, - "node_modules/gulp-cli/node_modules/yargs-parser": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-5.0.1.tgz", - "integrity": "sha512-wpav5XYiddjXxirPoCTUPbqM0PXvJ9hiBMvuJgInvo4/lAOTZzUprArw17q2O1P2+GHhbBr18/iQwjL5Z9BqfA==", - "dev": true, - "dependencies": { - "camelcase": "^3.0.0", - "object.assign": "^4.1.0" - } - }, - "node_modules/gulp-eslint": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/gulp-eslint/-/gulp-eslint-6.0.0.tgz", - "integrity": "sha512-dCVPSh1sA+UVhn7JSQt7KEb4An2sQNbOdB3PA8UCfxsoPlAKjJHxYHGXdXC7eb+V1FAnilSFFqslPrq037l1ig==", - "dev": true, - "dependencies": { - "eslint": "^6.0.0", - "fancy-log": "^1.3.2", - "plugin-error": "^1.0.1" - } - }, - 
"node_modules/gulp-eslint/node_modules/acorn": { - "version": "7.4.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", - "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", - "dev": true, - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/gulp-eslint/node_modules/ansi-regex": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", - "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/gulp-eslint/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/gulp-eslint/node_modules/argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "dev": true, - "dependencies": { - "sprintf-js": "~1.0.2" - } - }, - "node_modules/gulp-eslint/node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/gulp-eslint/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": 
"sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/gulp-eslint/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "dev": true - }, - "node_modules/gulp-eslint/node_modules/cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, - "dependencies": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - }, - "engines": { - "node": ">=4.8" - } - }, - "node_modules/gulp-eslint/node_modules/cross-spawn/node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true, - "bin": { - "semver": "bin/semver" - } - }, - "node_modules/gulp-eslint/node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "dev": true, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/gulp-eslint/node_modules/eslint": { - "version": "6.8.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.8.0.tgz", - "integrity": "sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.0.0", - "ajv": "^6.10.0", - "chalk": 
"^2.1.0", - "cross-spawn": "^6.0.5", - "debug": "^4.0.1", - "doctrine": "^3.0.0", - "eslint-scope": "^5.0.0", - "eslint-utils": "^1.4.3", - "eslint-visitor-keys": "^1.1.0", - "espree": "^6.1.2", - "esquery": "^1.0.1", - "esutils": "^2.0.2", - "file-entry-cache": "^5.0.1", - "functional-red-black-tree": "^1.0.1", - "glob-parent": "^5.0.0", - "globals": "^12.1.0", - "ignore": "^4.0.6", - "import-fresh": "^3.0.0", - "imurmurhash": "^0.1.4", - "inquirer": "^7.0.0", - "is-glob": "^4.0.0", - "js-yaml": "^3.13.1", - "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.3.0", - "lodash": "^4.17.14", - "minimatch": "^3.0.4", - "mkdirp": "^0.5.1", - "natural-compare": "^1.4.0", - "optionator": "^0.8.3", - "progress": "^2.0.0", - "regexpp": "^2.0.1", - "semver": "^6.1.2", - "strip-ansi": "^5.2.0", - "strip-json-comments": "^3.0.1", - "table": "^5.2.3", - "text-table": "^0.2.0", - "v8-compile-cache": "^2.0.3" - }, - "bin": { - "eslint": "bin/eslint.js" - }, - "engines": { - "node": "^8.10.0 || ^10.13.0 || >=11.10.1" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/gulp-eslint/node_modules/eslint-scope": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", - "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", - "dev": true, - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^4.1.1" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/gulp-eslint/node_modules/eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/gulp-eslint/node_modules/espree": { - "version": "6.2.1", - "resolved": 
"https://registry.npmjs.org/espree/-/espree-6.2.1.tgz", - "integrity": "sha512-ysCxRQY3WaXJz9tdbWOwuWr5Y/XrPTGX9Kiz3yoUXwW0VZ4w30HTkQLaGx/+ttFjF8i+ACbArnB4ce68a9m5hw==", - "dev": true, - "dependencies": { - "acorn": "^7.1.1", - "acorn-jsx": "^5.2.0", - "eslint-visitor-keys": "^1.1.0" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/gulp-eslint/node_modules/estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", - "dev": true, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/gulp-eslint/node_modules/fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", - "dev": true - }, - "node_modules/gulp-eslint/node_modules/file-entry-cache": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz", - "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==", - "dev": true, - "dependencies": { - "flat-cache": "^2.0.1" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/gulp-eslint/node_modules/flat-cache": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", - "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==", - "dev": true, - "dependencies": { - "flatted": "^2.0.0", - "rimraf": "2.6.3", - "write": "1.0.3" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/gulp-eslint/node_modules/flatted": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.2.tgz", - "integrity": 
"sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA==", - "dev": true - }, - "node_modules/gulp-eslint/node_modules/globals": { - "version": "12.4.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-12.4.0.tgz", - "integrity": "sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==", - "dev": true, - "dependencies": { - "type-fest": "^0.8.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/gulp-eslint/node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/gulp-eslint/node_modules/ignore": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", - "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", - "dev": true, - "engines": { - "node": ">= 4" - } - }, - "node_modules/gulp-eslint/node_modules/js-yaml": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", - "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", - "dev": true, - "dependencies": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/gulp-eslint/node_modules/levn": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", - "integrity": "sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==", - "dev": true, - "dependencies": { - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - 
"node_modules/gulp-eslint/node_modules/optionator": { - "version": "0.8.3", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", - "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", - "dev": true, - "dependencies": { - "deep-is": "~0.1.3", - "fast-levenshtein": "~2.0.6", - "levn": "~0.3.0", - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2", - "word-wrap": "~1.2.3" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/gulp-eslint/node_modules/path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/gulp-eslint/node_modules/prelude-ls": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", - "integrity": "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==", - "dev": true, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/gulp-eslint/node_modules/rimraf": { - "version": "2.6.3", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", - "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", - "dev": true, - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - } - }, - "node_modules/gulp-eslint/node_modules/shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", - "dev": true, - "dependencies": { - "shebang-regex": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/gulp-eslint/node_modules/shebang-regex": { - "version": "1.0.0", - 
"resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/gulp-eslint/node_modules/strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "dependencies": { - "ansi-regex": "^4.1.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/gulp-eslint/node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/gulp-eslint/node_modules/type-check": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", - "integrity": "sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==", - "dev": true, - "dependencies": { - "prelude-ls": "~1.1.2" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/gulp-eslint/node_modules/type-fest": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", - "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/gulp-eslint/node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - 
"dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "which": "bin/which" - } - }, - "node_modules/gulp-if": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/gulp-if/-/gulp-if-3.0.0.tgz", - "integrity": "sha512-fCUEngzNiEZEK2YuPm+sdMpO6ukb8+/qzbGfJBXyNOXz85bCG7yBI+pPSl+N90d7gnLvMsarthsAImx0qy7BAw==", - "dev": true, - "dependencies": { - "gulp-match": "^1.1.0", - "ternary-stream": "^3.0.0", - "through2": "^3.0.1" - } - }, - "node_modules/gulp-match": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/gulp-match/-/gulp-match-1.1.0.tgz", - "integrity": "sha512-DlyVxa1Gj24DitY2OjEsS+X6tDpretuxD6wTfhXE/Rw2hweqc1f6D/XtsJmoiCwLWfXgR87W9ozEityPCVzGtQ==", - "dev": true, - "dependencies": { - "minimatch": "^3.0.3" - } - }, - "node_modules/gulp-mocha": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/gulp-mocha/-/gulp-mocha-8.0.0.tgz", - "integrity": "sha512-FdbBydfzszaES/gXfwD6RFq1yJTj4Z6328R1yqsmhf+t7hW2aj9ZD9Hz8boQShjZ9J8/w6tQBM5mePb8K2pbqA==", - "dev": true, - "dependencies": { - "dargs": "^7.0.0", - "execa": "^5.0.0", - "mocha": "^8.3.0", - "plugin-error": "^1.0.1", - "supports-color": "^8.1.1", - "through2": "^4.0.2" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - }, - "peerDependencies": { - "gulp": ">=4" - }, - "peerDependenciesMeta": { - "gulp": { - "optional": true - } - } - }, - "node_modules/gulp-mocha/node_modules/ansi-colors": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", - "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/gulp-mocha/node_modules/anymatch": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", - "integrity": 
"sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", - "dev": true, - "dependencies": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/gulp-mocha/node_modules/chokidar": { - "version": "3.5.1", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.1.tgz", - "integrity": "sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw==", - "dev": true, - "dependencies": { - "anymatch": "~3.1.1", - "braces": "~3.0.2", - "glob-parent": "~5.1.0", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.5.0" - }, - "engines": { - "node": ">= 8.10.0" - }, - "optionalDependencies": { - "fsevents": "~2.3.1" - } - }, - "node_modules/gulp-mocha/node_modules/debug": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", - "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", - "dev": true, - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/gulp-mocha/node_modules/execa": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", - "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", - "dev": true, - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": "^2.1.0", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", - "strip-final-newline": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, - "node_modules/gulp-mocha/node_modules/glob": { - "version": "7.1.6", - "resolved": 
"https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", - "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", - "dev": true, - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/gulp-mocha/node_modules/human-signals": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", - "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", - "dev": true, - "engines": { - "node": ">=10.17.0" - } - }, - "node_modules/gulp-mocha/node_modules/is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", - "dev": true, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/gulp-mocha/node_modules/js-yaml": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.0.0.tgz", - "integrity": "sha512-pqon0s+4ScYUvX30wxQi3PogGFAlUyH0awepWvwkj4jD4v+ova3RiYw8bmA6x2rDrEaj8i/oWKoRxpVNW+Re8Q==", - "dev": true, - "dependencies": { - "argparse": "^2.0.1" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/gulp-mocha/node_modules/log-symbols": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.0.0.tgz", - "integrity": "sha512-FN8JBzLx6CzeMrB0tg6pqlGU1wCrXW+ZXGH481kfsBqer0hToTIiHdjH4Mq8xJUbvATujKCvaREGWpGUionraA==", - "dev": true, - "dependencies": { - "chalk": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/gulp-mocha/node_modules/mimic-fn": { - "version": 
"2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/gulp-mocha/node_modules/minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", - "dev": true, - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/gulp-mocha/node_modules/mocha": { - "version": "8.4.0", - "resolved": "https://registry.npmjs.org/mocha/-/mocha-8.4.0.tgz", - "integrity": "sha512-hJaO0mwDXmZS4ghXsvPVriOhsxQ7ofcpQdm8dE+jISUOKopitvnXFQmpRR7jd2K6VBG6E26gU3IAbXXGIbu4sQ==", - "dev": true, - "dependencies": { - "@ungap/promise-all-settled": "1.1.2", - "ansi-colors": "4.1.1", - "browser-stdout": "1.3.1", - "chokidar": "3.5.1", - "debug": "4.3.1", - "diff": "5.0.0", - "escape-string-regexp": "4.0.0", - "find-up": "5.0.0", - "glob": "7.1.6", - "growl": "1.10.5", - "he": "1.2.0", - "js-yaml": "4.0.0", - "log-symbols": "4.0.0", - "minimatch": "3.0.4", - "ms": "2.1.3", - "nanoid": "3.1.20", - "serialize-javascript": "5.0.1", - "strip-json-comments": "3.1.1", - "supports-color": "8.1.1", - "which": "2.0.2", - "wide-align": "1.1.3", - "workerpool": "6.1.0", - "yargs": "16.2.0", - "yargs-parser": "20.2.4", - "yargs-unparser": "2.0.0" - }, - "bin": { - "_mocha": "bin/_mocha", - "mocha": "bin/mocha" - }, - "engines": { - "node": ">= 10.12.0" + "node": ">= 0.4" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/mochajs" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/gulp-mocha/node_modules/mocha/node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": 
"sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "node_modules/globalyzer": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/globalyzer/-/globalyzer-0.1.0.tgz", + "integrity": "sha512-40oNTM9UfG6aBmuKxk/giHn5nQ8RVz/SS4Ir6zgzOv9/qC3kKZ9v4etGTcJbEl/NyVQH7FGU7d+X1egr57Md2Q==", "dev": true }, - "node_modules/gulp-mocha/node_modules/nanoid": { - "version": "3.1.20", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.20.tgz", - "integrity": "sha512-a1cQNyczgKbLX9jwbS/+d7W8fX/RfgYR7lVWwWOGIPNgK2m0MWvrGF6/m4kk6U3QcFMnZf3RIhL0v2Jgh/0Uxw==", - "dev": true, - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - } - }, - "node_modules/gulp-mocha/node_modules/npm-run-path": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", - "dev": true, - "dependencies": { - "path-key": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/gulp-mocha/node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", "dev": true, + "license": "MIT", "dependencies": { - "mimic-fn": "^2.1.0" + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" }, "engines": { - "node": ">=6" + "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - 
"node_modules/gulp-mocha/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dev": true, - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/gulp-mocha/node_modules/readdirp": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.5.0.tgz", - "integrity": "sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==", - "dev": true, - "dependencies": { - "picomatch": "^2.2.1" - }, - "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/gulp-mocha/node_modules/serialize-javascript": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-5.0.1.tgz", - "integrity": "sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA==", - "dev": true, - "dependencies": { - "randombytes": "^2.1.0" - } - }, - "node_modules/gulp-mocha/node_modules/strip-final-newline": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/gulp-mocha/node_modules/through2": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/through2/-/through2-4.0.2.tgz", - "integrity": "sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==", - "dev": true, - "dependencies": { - "readable-stream": "3" - } - }, - "node_modules/gulp-mocha/node_modules/workerpool": { - "version": "6.1.0", - "resolved": 
"https://registry.npmjs.org/workerpool/-/workerpool-6.1.0.tgz", - "integrity": "sha512-toV7q9rWNYha963Pl/qyeZ6wG+3nnsyvolaNUS8+R5Wtw6qJPTxIlOP1ZSvcGhEJw+l3HMMmtiNo9Gl61G4GVg==", + "node_modules/globrex": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/globrex/-/globrex-0.1.2.tgz", + "integrity": "sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg==", "dev": true }, - "node_modules/gulp-sourcemaps": { - "version": "2.6.5", - "resolved": "https://registry.npmjs.org/gulp-sourcemaps/-/gulp-sourcemaps-2.6.5.tgz", - "integrity": "sha512-SYLBRzPTew8T5Suh2U8jCSDKY+4NARua4aqjj8HOysBh2tSgT9u4jc1FYirAdPx1akUxxDeK++fqw6Jg0LkQRg==", - "dev": true, + "node_modules/gopd": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", + "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "license": "MIT", "dependencies": { - "@gulp-sourcemaps/identity-map": "1.X", - "@gulp-sourcemaps/map-sources": "1.X", - "acorn": "5.X", - "convert-source-map": "1.X", - "css": "2.X", - "debug-fabulous": "1.X", - "detect-newline": "2.X", - "graceful-fs": "4.X", - "source-map": "~0.6.0", - "strip-bom-string": "1.X", - "through2": "2.X" + "get-intrinsic": "^1.1.3" }, - "engines": { - "node": ">=4" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/gulp-sourcemaps/node_modules/acorn": { - "version": "5.7.4", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.4.tgz", - "integrity": "sha512-1D++VG7BhrtvQpNbBzovKNc1FLGGEE/oGe7b9xJm/RFHMBeUaUGpluV9RLjZa47YFdPcDAenEYuq9pQPcMdLJg==", - "dev": true, - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", 
+ "dev": true }, - "node_modules/gulp-sourcemaps/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "node_modules/grapheme-splitter": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", + "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==", "dev": true, - "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } + "license": "MIT" }, - "node_modules/gulplog": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/gulplog/-/gulplog-1.0.0.tgz", - "integrity": "sha512-hm6N8nrm3Y08jXie48jsC55eCZz9mnb4OirAStEk2deqeyhXU3C1otDVh+ccttMuc1sBi6RX6ZJ720hs9RCvgw==", + "node_modules/growl": { + "version": "1.10.5", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", "dev": true, - "dependencies": { - "glogg": "^1.0.0" - }, + "license": "MIT", "engines": { - "node": ">= 0.10" + "node": ">=4.x" } }, "node_modules/has": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "license": "MIT", "dependencies": { "function-bind": "^1.1.1" }, @@ -7202,11 +4639,21 @@ "node": ">= 0.4.0" } }, + "node_modules/has-bigints": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", + "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/has-flag": { "version": "4.0.0", "resolved": 
"https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -7223,10 +4670,23 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/has-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", + "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/has-symbols": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -7238,6 +4698,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "license": "MIT", "dependencies": { "has-symbols": "^1.0.2" }, @@ -7248,156 +4709,22 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/has-value": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-value/-/has-value-1.0.0.tgz", - "integrity": "sha512-IBXk4GTsLYdQ7Rvt+GRBrFSVEkmuOUy4re0Xjd9kJSUQpnTrWR4/y9RpfexN9vkAPMFuQoeWKwqzPozRTlasGw==", - "dev": true, - "dependencies": { - "get-value": "^2.0.6", - "has-values": "^1.0.0", - "isobject": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/has-values": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-values/-/has-values-1.0.0.tgz", - "integrity": "sha512-ODYZC64uqzmtfGMEAX/FvZiRyWLpAC3vYnNunURUnkGVTS+mI0smVsWaPydRBsE3g+ok7h960jChO8mFcWlHaQ==", - "dev": 
true, - "dependencies": { - "is-number": "^3.0.0", - "kind-of": "^4.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/has-values/node_modules/is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/has-values/node_modules/is-number/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/has-values/node_modules/kind-of": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-4.0.0.tgz", - "integrity": "sha512-24XsCxmEbRwEDbz/qz3stgin8TTzZ1ESR56OMCN0ujYg+vRutNSiOj9bHH9u85DKgXguraugV5sFuvbD4FW/hw==", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/hash-base": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.1.0.tgz", - "integrity": "sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==", - "dependencies": { - "inherits": "^2.0.4", - "readable-stream": "^3.6.0", - "safe-buffer": "^5.2.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/hash-base/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": 
"^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/hash.js": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/hash.js/-/hash.js-1.1.7.tgz", - "integrity": "sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==", - "dependencies": { - "inherits": "^2.0.3", - "minimalistic-assert": "^1.0.1" - } - }, "node_modules/he": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", "dev": true, + "license": "MIT", "bin": { "he": "bin/he" } }, - "node_modules/hmac-drbg": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz", - "integrity": "sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg==", - "dependencies": { - "hash.js": "^1.0.3", - "minimalistic-assert": "^1.0.0", - "minimalistic-crypto-utils": "^1.0.1" - } - }, - "node_modules/homedir-polyfill": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz", - "integrity": "sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA==", - "dev": true, - "dependencies": { - "parse-passwd": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/hosted-git-info": { "version": "2.8.9", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", "dev": true }, - "node_modules/htmlescape": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/htmlescape/-/htmlescape-1.1.1.tgz", - "integrity": "sha512-eVcrzgbR4tim7c7soKQKtxa/kQM4TzjnlU83rcZ9bHU6t31ehfV7SktN6McWgwPWg+JYMA/O3qpGxBvFq1z2Jg==", - "dev": true, - "engines": { - "node": ">=0.10" - } - }, - 
"node_modules/https-browserify": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/https-browserify/-/https-browserify-1.0.0.tgz", - "integrity": "sha512-J+FkSdyD+0mA0N+81tMotaRMfSL9SGi+xpD3T6YApKsc3bGSXJlfXri3VyFOeYkfLRQisDk1W+jIFFKBeUBbBg==", - "dev": true - }, "node_modules/human-signals": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-4.3.1.tgz", @@ -7422,43 +4749,12 @@ "url": "https://github.com/sponsors/typicode" } }, - "node_modules/iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", - "dev": true, - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/ieee754": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, "node_modules/ignore": { "version": "5.2.4", "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz", "integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==", "dev": true, + "license": "MIT", "engines": { "node": ">= 4" } @@ -7468,6 +4764,7 @@ "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", "dev": true, + "license": "MIT", "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" @@ -7484,6 +4781,7 @@ "resolved": 
"https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.8.19" } @@ -7502,6 +4800,7 @@ "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", "dev": true, + "license": "ISC", "dependencies": { "once": "^1.3.0", "wrappy": "1" @@ -7510,186 +4809,37 @@ "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "node_modules/ini": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", - "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", - "dev": true - }, - "node_modules/inline-source-map": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/inline-source-map/-/inline-source-map-0.6.2.tgz", - "integrity": "sha512-0mVWSSbNDvedDWIN4wxLsdPM4a7cIPcpyMxj3QZ406QRwQ6ePGB1YIHxVPjqpcUGbWQ5C+nHTwGNWAGvt7ggVA==", - "dev": true, - "dependencies": { - "source-map": "~0.5.3" - } - }, - "node_modules/inline-source-map/node_modules/source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/inquirer": { - "version": "7.3.3", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.3.3.tgz", - "integrity": "sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA==", - "dev": true, - "dependencies": { - 
"ansi-escapes": "^4.2.1", - "chalk": "^4.1.0", - "cli-cursor": "^3.1.0", - "cli-width": "^3.0.0", - "external-editor": "^3.0.3", - "figures": "^3.0.0", - "lodash": "^4.17.19", - "mute-stream": "0.0.8", - "run-async": "^2.4.0", - "rxjs": "^6.6.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0", - "through": "^2.3.6" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/inquirer/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/inquirer/node_modules/rxjs": { - "version": "6.6.7", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.6.7.tgz", - "integrity": "sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ==", - "dev": true, - "dependencies": { - "tslib": "^1.9.0" - }, - "engines": { - "npm": ">=2.0.0" - } - }, - "node_modules/inquirer/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" }, - "node_modules/insert-module-globals": { - "version": "7.2.1", - "resolved": "https://registry.npmjs.org/insert-module-globals/-/insert-module-globals-7.2.1.tgz", - "integrity": "sha512-ufS5Qq9RZN+Bu899eA9QCAYThY+gGW7oRkmb0vC93Vlyu/CFGcH0OYPEjVkDXA5FEbTt1+VWzdoOD3Ny9N+8tg==", + "node_modules/internal-slot": { + "version": "1.0.5", + 
"resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.5.tgz", + "integrity": "sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ==", "dev": true, "dependencies": { - "acorn-node": "^1.5.2", - "combine-source-map": "^0.8.0", - "concat-stream": "^1.6.1", - "is-buffer": "^1.1.0", - "JSONStream": "^1.0.3", - "path-is-absolute": "^1.0.1", - "process": "~0.11.0", - "through2": "^2.0.0", - "undeclared-identifiers": "^1.1.2", - "xtend": "^4.0.0" + "get-intrinsic": "^1.2.0", + "has": "^1.0.3", + "side-channel": "^1.0.4" }, - "bin": { - "insert-module-globals": "bin/cmd.js" - } - }, - "node_modules/insert-module-globals/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dev": true, - "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } - }, - "node_modules/interpret": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", - "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==", - "dev": true, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/invert-kv": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-1.0.0.tgz", - "integrity": "sha512-xgs2NH9AE66ucSq4cNG1nhSFghr5l6tdL15Pk+jl46bmmBapgoaY/AacXyaDznAqmGL99TiLSQgO/XazFSKYeQ==", - "dev": true, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" } }, "node_modules/ipaddr.js": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.0.1.tgz", "integrity": "sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng==", + "license": "MIT", "engines": { "node": ">= 10" } }, - "node_modules/is-absolute": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/is-absolute/-/is-absolute-1.0.0.tgz", - "integrity": "sha512-dOWoqflvcydARa360Gvv18DZ/gRuHKi2NU/wU5X1ZFzdYfH29nkiNZsF3mp4OJ3H4yo9Mx8A/uAGNzpzPN3yBA==", - "dev": true, - "dependencies": { - "is-relative": "^1.0.0", - "is-windows": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "dev": true, - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-accessor-descriptor/node_modules/kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/is-arguments": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz", "integrity": "sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==", + "license": "MIT", "dependencies": { "call-bind": "^1.0.2", "has-tostringtag": "^1.0.0" @@ -7701,17 +4851,44 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-array-buffer": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz", + "integrity": "sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.2.0", + "is-typed-array": "^1.1.10" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-arrayish": { "version": "0.2.1", "resolved": 
"https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", "dev": true }, + "node_modules/is-bigint": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", + "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", + "dev": true, + "dependencies": { + "has-bigints": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-binary-path": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", "dev": true, + "license": "MIT", "dependencies": { "binary-extensions": "^2.0.0" }, @@ -7719,16 +4896,42 @@ "node": ">=8" } }, - "node_modules/is-buffer": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", - "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==", - "dev": true + "node_modules/is-boolean-object": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", + "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-builtin-module": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-3.2.1.tgz", + "integrity": "sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A==", + "dev": true, + "dependencies": { + "builtin-modules": 
"^3.3.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } }, "node_modules/is-callable": { "version": "1.2.7", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -7741,6 +4944,7 @@ "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz", "integrity": "sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==", "dev": true, + "license": "MIT", "dependencies": { "has": "^1.0.3" }, @@ -7748,72 +4952,34 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", + "node_modules/is-date-object": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", + "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", "dev": true, "dependencies": { - "kind-of": "^6.0.0" + "has-tostringtag": "^1.0.0" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-data-descriptor/node_modules/kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": 
"sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dev": true, - "dependencies": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" + "node": ">= 0.4" }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-descriptor/node_modules/kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "dev": true, - "engines": { - "node": ">=0.10.0" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-extendable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", + "node_modules/is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", "dev": true, - "dependencies": { - "is-plain-object": "^2.0.4" + "bin": { + "is-docker": "cli.js" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-extendable/node_modules/is-plain-object": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", - "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", - "dev": true, - "dependencies": { - "isobject": "^3.0.1" + "node": ">=8" }, - "engines": { - "node": ">=0.10.0" + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/is-extglob": { @@ -7821,6 +4987,7 @@ "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", "integrity": 
"sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } @@ -7841,6 +5008,7 @@ "version": "1.0.10", "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz", "integrity": "sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==", + "license": "MIT", "dependencies": { "has-tostringtag": "^1.0.0" }, @@ -7856,6 +5024,7 @@ "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "dev": true, + "license": "MIT", "dependencies": { "is-extglob": "^2.1.1" }, @@ -7863,22 +5032,31 @@ "node": ">=0.10.0" } }, - "node_modules/is-negated-glob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-negated-glob/-/is-negated-glob-1.0.0.tgz", - "integrity": "sha512-czXVVn/QEmgvej1f50BZ648vUI+em0xqMq2Sn+QncCLN4zj1UAxlT+kw/6ggQTOaZPd1HqKQGEqbpQVtJucWug==", + "node_modules/is-negative-zero": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", + "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", "dev": true, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-number": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-4.0.0.tgz", - "integrity": "sha512-rSklcAIlf1OmFdyAqbnWTLVelsQ58uvZ66S/ZyawjWqIviTWCjg2PzVGw8WUA+nNuPTqb4wgA+NszrJ+08LlgQ==", + "node_modules/is-number-object": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", + "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", "dev": true, + 
"dependencies": { + "has-tostringtag": "^1.0.0" + }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-path-inside": { @@ -7886,6 +5064,7 @@ "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -7895,35 +5074,49 @@ "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/is-plain-object": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", - "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", + "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", "dev": true, + "dependencies": { + "isobject": "^3.0.1" + }, "engines": { "node": ">=0.10.0" } }, - "node_modules/is-promise": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.2.2.tgz", - "integrity": "sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ==", - "dev": true - }, - "node_modules/is-relative": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-relative/-/is-relative-1.0.0.tgz", - "integrity": "sha512-Kw/ReK0iqwKeu0MITLFuj0jbPAmEiOsIwyIXvvbfa6QfmN9pkD1M+8pdk7Rl/dTKbH34/XBFMbgD4iMJhLQbGA==", + "node_modules/is-regex": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", + 
"integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", "dev": true, "dependencies": { - "is-unc-path": "^1.0.0" + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-shared-array-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", + "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-stream": { @@ -7938,10 +5131,41 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/is-string": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", + "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", + "dev": true, + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-symbol": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", + "dev": true, + "dependencies": { + "has-symbols": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-typed-array": { "version": "1.1.10", "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.10.tgz", "integrity": "sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A==", + "license": "MIT", "dependencies": { 
"available-typed-arrays": "^1.0.5", "call-bind": "^1.0.2", @@ -7956,23 +5180,12 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-unc-path": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-unc-path/-/is-unc-path-1.0.0.tgz", - "integrity": "sha512-mrGpVd0fs7WWLfVsStvgF6iEJnbjDFZh9/emhRDcGWTduTfNHd9CHeUwH3gYIjdbwo4On6hunkztwOaAw0yllQ==", - "dev": true, - "dependencies": { - "unc-path-regex": "^0.1.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/is-unicode-supported": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, @@ -7980,41 +5193,36 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/is-utf8": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-utf8/-/is-utf8-0.2.1.tgz", - "integrity": "sha512-rMYPYvCzsXywIsldgLaSoPlw5PfoB/ssr7hY4pLfcodrA5M/eArza1a9VmTiNIBNMjOGr1Ow9mTyU2o69U6U9Q==", - "dev": true - }, - "node_modules/is-valid-glob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-valid-glob/-/is-valid-glob-1.0.0.tgz", - "integrity": "sha512-AhiROmoEFDSsjx8hW+5sGwgKVIORcXnrlAx/R0ZSeaPw70Vw0CqkGBBhHGL58Uox2eXnU1AnvXJl1XlyedO5bA==", + "node_modules/is-weakref": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", + "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", "dev": true, - "engines": { - "node": ">=0.10.0" + "dependencies": { + "call-bind": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-windows": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", - "integrity": 
"sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==", + "node_modules/is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", "dev": true, + "dependencies": { + "is-docker": "^2.0.0" + }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", - "dev": true - }, "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/isobject": { "version": "3.0.1", @@ -8030,6 +5238,7 @@ "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.4.0.tgz", "integrity": "sha512-FfVSdx6pJ41Oa+CF7RDaFmTnCaFhua+SNYQX74riGOpl96x+2jQCqEfQ2bnXu/5DPCqlRuiqyvTJM0Qjz26IVg==", "dev": true, + "license": "MIT", "funding": { "type": "opencollective", "url": "https://opencollective.com/js-sdsl" @@ -8039,13 +5248,15 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/js-yaml": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", "dev": true, + "license": "MIT", "dependencies": { "argparse": "^2.0.1" }, @@ -8058,6 +5269,7 @@ "resolved": 
"https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", "dev": true, + "license": "MIT", "bin": { "jsesc": "bin/jsesc" }, @@ -8065,156 +5277,59 @@ "node": ">=4" } }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, "node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true - }, - "node_modules/json-stable-stringify": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify/-/json-stable-stringify-0.0.1.tgz", - "integrity": "sha512-nKtD/Qxm7tWdZqJoldEC7fF0S41v0mWbeaXG3637stOWfyGxTgWTYE2wtfKmjzpvxv2MA2xzxsXOIiwUpkX6Qw==", - "dev": true, - "dependencies": { - "jsonify": "~0.0.0" - } - }, - "node_modules/json-stable-stringify-without-jsonify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", - "dev": true - }, - "node_modules/json-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-stream/-/json-stream-1.0.0.tgz", - "integrity": "sha512-H/ZGY0nIAg3QcOwE1QN/rK/Fa7gJn7Ii5obwp6zyPO4xiPNwpIMjqy2gwjBEGqzkF/vSWEIBQCBuN19hYiL6Qg==" - }, - "node_modules/json-stringify-safe": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": 
"sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", - "dev": true - }, - "node_modules/json5": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", - "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", - "dev": true, - "bin": { - "json5": "lib/cli.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/jsonify": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/jsonify/-/jsonify-0.0.1.tgz", - "integrity": "sha512-2/Ki0GcmuqSrgFyelQq9M05y7PS0mEwuIzrf3f1fPqkVDVRvZrPZtVSMHxdgo8Aq0sxAOb/cr2aqqA3LeWHVPg==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/jsonparse": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz", - "integrity": "sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==", - "dev": true, - "engines": [ - "node >= 0.2.0" - ] - }, - "node_modules/JSONStream": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz", - "integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==", - "dev": true, - "dependencies": { - "jsonparse": "^1.2.0", - "through": ">=2.2.7 <3" - }, - "bin": { - "JSONStream": "bin.js" - }, - "engines": { - "node": "*" - } - }, - "node_modules/just-debounce": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/just-debounce/-/just-debounce-1.1.0.tgz", - "integrity": "sha512-qpcRocdkUmf+UTNBYx5w6dexX5J31AKK1OmPwH630a83DdVVUIngk55RSAiIGpQyoH0dlr872VHfPjnQnK1qDQ==", - "dev": true - }, - "node_modules/kind-of": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-5.1.0.tgz", - "integrity": "sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==", - "dev": 
true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/labeled-stream-splicer": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/labeled-stream-splicer/-/labeled-stream-splicer-2.0.2.tgz", - "integrity": "sha512-Ca4LSXFFZUjPScRaqOcFxneA0VpKZr4MMYCljyQr4LIewTLb3Y0IUTIsnBBsVubIeEfxeSZpSjSsRM8APEQaAw==", - "dev": true, - "dependencies": { - "inherits": "^2.0.1", - "stream-splicer": "^2.0.0" - } - }, - "node_modules/last-run": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/last-run/-/last-run-1.1.1.tgz", - "integrity": "sha512-U/VxvpX4N/rFvPzr3qG5EtLKEnNI0emvIQB3/ecEwv+8GHaUKbIB8vxv1Oai5FAF0d0r7LXHhLLe5K/yChm5GQ==", "dev": true, - "dependencies": { - "default-resolution": "^2.0.0", - "es6-weak-map": "^2.0.1" - }, - "engines": { - "node": ">= 0.10" - } + "license": "MIT" }, - "node_modules/lazystream": { + "node_modules/json-stable-stringify-without-jsonify": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.1.tgz", - "integrity": "sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", "dev": true, - "dependencies": { - "readable-stream": "^2.0.5" - }, - "engines": { - "node": ">= 0.6.3" - } + "license": "MIT" }, - "node_modules/lcid": { + "node_modules/json-stream": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/lcid/-/lcid-1.0.0.tgz", - "integrity": "sha512-YiGkH6EnGrDGqLMITnGjXtGmNtjoXw9SVUzcaos8RBi7Ps0VBylkq+vOcY9QE5poLasPCR849ucFUkl0UzUyOw==", + "resolved": "https://registry.npmjs.org/json-stream/-/json-stream-1.0.0.tgz", + "integrity": "sha512-H/ZGY0nIAg3QcOwE1QN/rK/Fa7gJn7Ii5obwp6zyPO4xiPNwpIMjqy2gwjBEGqzkF/vSWEIBQCBuN19hYiL6Qg==", + "license": "MIT" + }, + 
"node_modules/json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", "dev": true, - "dependencies": { - "invert-kv": "^1.0.0" + "license": "ISC" + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" }, "engines": { - "node": ">=0.10.0" + "node": ">=6" } }, - "node_modules/lead": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/lead/-/lead-1.0.0.tgz", - "integrity": "sha512-IpSVCk9AYvLHo5ctcIXxOBpMWUe+4TKN3VPWAKUbJikkmsGp0VrSM8IttVc32D6J4WUsiPE6aEFRNmIoF/gdow==", + "node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", "dev": true, - "dependencies": { - "flush-write-stream": "^1.0.2" - }, "engines": { - "node": ">= 0.10" + "node": ">=0.10.0" } }, "node_modules/levn": { @@ -8222,6 +5337,7 @@ "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", "dev": true, + "license": "MIT", "dependencies": { "prelude-ls": "^1.2.1", "type-check": "~0.4.0" @@ -8230,37 +5346,6 @@ "node": ">= 0.8.0" } }, - "node_modules/liftoff": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/liftoff/-/liftoff-3.1.0.tgz", - "integrity": "sha512-DlIPlJUkCV0Ips2zf2pJP0unEoT1kwYhiiPUGF3s/jtxTCjziNLoiVVh+jqWOWeFi6mmwQ5fNxvAUyPad4Dfog==", - "dev": true, - "dependencies": { - "extend": "^3.0.0", - "findup-sync": "^3.0.0", - "fined": 
"^1.0.1", - "flagged-respawn": "^1.0.0", - "is-plain-object": "^2.0.4", - "object.map": "^1.0.0", - "rechoir": "^0.6.2", - "resolve": "^1.1.7" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/liftoff/node_modules/is-plain-object": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", - "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", - "dev": true, - "dependencies": { - "isobject": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/lilconfig": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz", @@ -8270,6 +5355,12 @@ "node": ">=10" } }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true + }, "node_modules/lint-staged": { "version": "13.2.1", "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-13.2.1.tgz", @@ -8392,27 +5483,12 @@ "node": ">=8" } }, - "node_modules/load-json-file": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz", - "integrity": "sha512-cy7ZdNRXdablkXYNI049pthVeXFurRyb9+hA/dZzerZ0pGTx42z+y+ssxBaVV2l70t1muq5IdKhn4UtcoGUY9A==", - "dev": true, - "dependencies": { - "graceful-fs": "^4.1.2", - "parse-json": "^2.2.0", - "pify": "^2.0.0", - "pinkie-promise": "^2.0.0", - "strip-bom": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", "dev": true, + "license": "MIT", "dependencies": { "p-locate": "^5.0.0" }, @@ -8426,31 
+5502,29 @@ "node_modules/lodash": { "version": "4.17.21", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "license": "MIT" }, "node_modules/lodash.debounce": { "version": "4.0.8", "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==", - "dev": true - }, - "node_modules/lodash.memoize": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-3.0.4.tgz", - "integrity": "sha512-eDn9kqrAmVUC1wmZvlQ6Uhde44n+tXpqPrN8olQJbttgh0oKclk+SF54P47VEGE9CEiMeRwAP8BaM7UHvBkz2A==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/lodash.merge": { "version": "4.6.2", "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/log-symbols": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", "dev": true, + "license": "MIT", "dependencies": { "chalk": "^4.1.0", "is-unicode-supported": "^0.1.0" @@ -8539,6 +5613,7 @@ "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.6.tgz", "integrity": "sha512-RaPMZKiMy8/JruncMU5Bt6na1eftNoo++R4Y+N2FrxkDVTrGvcyzFTsaGif4QTeKESheMGegbhw6iUAq+5A8zA==", "dev": true, + "license": "MIT", "dependencies": { "get-func-name": "^2.0.0" } @@ -8548,266 +5623,31 @@ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", "integrity": 
"sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "dev": true, + "license": "ISC", "dependencies": { "yallist": "^3.0.2" } }, - "node_modules/lru-queue": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/lru-queue/-/lru-queue-0.1.0.tgz", - "integrity": "sha512-BpdYkt9EvGl8OfWHDQPISVpcl5xZthb+XPsbELj5AQXxIC8IriDZIQYjBJPEm5rS420sjZ0TLEzRcq5KdBhYrQ==", - "dev": true, - "dependencies": { - "es5-ext": "~0.10.2" - } - }, - "node_modules/make-iterator": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/make-iterator/-/make-iterator-1.0.1.tgz", - "integrity": "sha512-pxiuXh0iVEq7VM7KMIhs5gxsfxCux2URptUQaXo4iZZJxBAzTPOLE2BumO5dbfVYq/hBJFBR/a1mFDmOx5AGmw==", - "dev": true, - "dependencies": { - "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/make-iterator/node_modules/kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/map-cache": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz", - "integrity": "sha512-8y/eV9QQZCiyn1SprXSrCmqJN0yNRATe+PO8ztwqrvrbdRLA3eYJF0yaR0YayLWkMbsQSKWS9N2gPcGEc4UsZg==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/map-visit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/map-visit/-/map-visit-1.0.0.tgz", - "integrity": "sha512-4y7uGv8bd2WdM9vpQsiQNo41Ln1NvhvDRuVt0k2JZQ+ezN2uaQes7lZeZ+QQUHOLQAtDaBJ+7wCbi+ab/KFs+w==", - "dev": true, - "dependencies": { - "object-visit": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/matchdep": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/matchdep/-/matchdep-2.0.0.tgz", - "integrity": 
"sha512-LFgVbaHIHMqCRuCZyfCtUOq9/Lnzhi7Z0KFUE2fhD54+JN2jLh3hC02RLkqauJ3U4soU6H1J3tfj/Byk7GoEjA==", - "dev": true, - "dependencies": { - "findup-sync": "^2.0.0", - "micromatch": "^3.0.4", - "resolve": "^1.4.0", - "stack-trace": "0.0.10" - }, - "engines": { - "node": ">= 0.10.0" - } - }, - "node_modules/matchdep/node_modules/braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", - "dev": true, - "dependencies": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/matchdep/node_modules/braces/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/matchdep/node_modules/fill-range": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", - "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", - "dev": true, - "dependencies": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/matchdep/node_modules/fill-range/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": 
"sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/matchdep/node_modules/findup-sync": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/findup-sync/-/findup-sync-2.0.0.tgz", - "integrity": "sha512-vs+3unmJT45eczmcAZ6zMJtxN3l/QXeccaXQx5cu/MeJMhewVfoWZqibRkOxPnmoR59+Zy5hjabfQc6JLSah4g==", - "dev": true, - "dependencies": { - "detect-file": "^1.0.0", - "is-glob": "^3.1.0", - "micromatch": "^3.0.4", - "resolve-dir": "^1.0.1" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/matchdep/node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/matchdep/node_modules/is-glob": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", - "integrity": "sha512-UFpDDrPgM6qpnFNI+rh/p3bUaq9hKLZN8bMUWzxmcnZVS3omf4IPK+BrewlnWjO1WmUsMYuSjKh4UJuV4+Lqmw==", - "dev": true, - "dependencies": { - "is-extglob": "^2.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/matchdep/node_modules/is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/matchdep/node_modules/is-number/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": 
"sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/matchdep/node_modules/kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/matchdep/node_modules/micromatch": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", - "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", - "dev": true, - "dependencies": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/matchdep/node_modules/to-regex-range": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "node_modules/make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", "dev": true, "dependencies": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" + "pify": "^4.0.1", + "semver": "^5.6.0" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/md5.js": { - "version": "1.3.5", - "resolved": 
"https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz", - "integrity": "sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==", - "dependencies": { - "hash-base": "^3.0.0", - "inherits": "^2.0.1", - "safe-buffer": "^5.1.2" + "node": ">=6" } }, - "node_modules/memoizee": { - "version": "0.4.15", - "resolved": "https://registry.npmjs.org/memoizee/-/memoizee-0.4.15.tgz", - "integrity": "sha512-UBWmJpLZd5STPm7PMUlOw/TSy972M+z8gcyQ5veOnSDRREz/0bmpyTfKt3/51DhEBqCZQn1udM/5flcSPYhkdQ==", + "node_modules/make-dir/node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", "dev": true, - "dependencies": { - "d": "^1.0.1", - "es5-ext": "^0.10.53", - "es6-weak-map": "^2.0.3", - "event-emitter": "^0.3.5", - "is-promise": "^2.2.2", - "lru-queue": "^0.1.0", - "next-tick": "^1.1.0", - "timers-ext": "^0.1.7" + "bin": { + "semver": "bin/semver" } }, "node_modules/merge-stream": { @@ -8821,6 +5661,7 @@ "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", "dev": true, + "license": "MIT", "engines": { "node": ">= 8" } @@ -8830,6 +5671,7 @@ "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.6" } @@ -8847,28 +5689,12 @@ "node": ">=8.6" } }, - "node_modules/miller-rabin": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/miller-rabin/-/miller-rabin-4.0.1.tgz", - "integrity": "sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA==", - "dependencies": { - "bn.js": "^4.0.0", - "brorand": "^1.0.1" - }, - "bin": { - 
"miller-rabin": "bin/miller-rabin" - } - }, - "node_modules/miller-rabin/node_modules/bn.js": { - "version": "4.12.0", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", - "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" - }, "node_modules/mime": { "version": "2.6.0", "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", "dev": true, + "license": "MIT", "bin": { "mime": "cli.js" }, @@ -8880,6 +5706,7 @@ "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", "engines": { "node": ">= 0.6" } @@ -8888,6 +5715,7 @@ "version": "2.1.35", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", "dependencies": { "mime-db": "1.52.0" }, @@ -8907,21 +5735,21 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/minimalistic-assert": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", - "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==" - }, - "node_modules/minimalistic-crypto-utils": { + "node_modules/min-indent": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz", - "integrity": "sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg==" + "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", + "integrity": 
"sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", + "dev": true, + "engines": { + "node": ">=4" + } }, "node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, + "license": "ISC", "dependencies": { "brace-expansion": "^1.1.7" }, @@ -8933,27 +5761,16 @@ "version": "1.2.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/mixin-deep": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz", - "integrity": "sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==", - "dev": true, - "dependencies": { - "for-in": "^1.0.2", - "is-extendable": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/mkdirp": { "version": "0.5.6", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "license": "MIT", "dependencies": { "minimist": "^1.2.6" }, @@ -8961,17 +5778,12 @@ "mkdirp": "bin/cmd.js" } }, - "node_modules/mkdirp-classic": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", - "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", - "dev": true - }, "node_modules/mocha": { "version": "9.2.2", "resolved": "https://registry.npmjs.org/mocha/-/mocha-9.2.2.tgz", "integrity": "sha512-L6XC3EdwT6YrIk0yXpavvLkn8h+EU+Y5UcCHKECyMbdUIxyMuZj4bX4U9e1nvnvUUvQVsV2VHQr5zLdcUkhW/g==", 
"dev": true, + "license": "MIT", "dependencies": { "@ungap/promise-all-settled": "1.1.2", "ansi-colors": "4.1.1", @@ -9014,13 +5826,15 @@ "version": "1.3.0", "resolved": "https://registry.npmjs.org/mocha-steps/-/mocha-steps-1.3.0.tgz", "integrity": "sha512-KZvpMJTqzLZw3mOb+EEuYi4YZS41C9iTnb7skVFRxHjUd1OYbl64tCMSmpdIRM9LnwIrSOaRfPtNpF5msgv6Eg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/mocha/node_modules/ansi-colors": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" } @@ -9030,6 +5844,7 @@ "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", "dev": true, + "license": "MIT", "dependencies": { "ms": "2.1.2" }, @@ -9046,13 +5861,15 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/mocha/node_modules/glob": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", "dev": true, + "license": "ISC", "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -9073,6 +5890,7 @@ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, + "license": "ISC", "dependencies": { "brace-expansion": "^1.1.7" }, @@ -9085,84 +5903,34 @@ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-4.2.1.tgz", "integrity": 
"sha512-9Uq1ChtSZO+Mxa/CL1eGizn2vRn3MlLgzhT0Iz8zaY8NdvxvB0d5QdPFmCKf7JKA9Lerx5vRrnwO03jsSfGG9g==", "dev": true, - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/mocha/node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "dev": true - }, - "node_modules/module-deps": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/module-deps/-/module-deps-6.2.3.tgz", - "integrity": "sha512-fg7OZaQBcL4/L+AK5f4iVqf9OMbCclXfy/znXRxTVhJSeW5AIlS9AwheYwDaXM3lVW7OBeaeUEY3gbaC6cLlSA==", - "dev": true, - "dependencies": { - "browser-resolve": "^2.0.0", - "cached-path-relative": "^1.0.2", - "concat-stream": "~1.6.0", - "defined": "^1.0.0", - "detective": "^5.2.0", - "duplexer2": "^0.1.2", - "inherits": "^2.0.1", - "JSONStream": "^1.0.3", - "parents": "^1.0.0", - "readable-stream": "^2.0.2", - "resolve": "^1.4.0", - "stream-combiner2": "^1.1.1", - "subarg": "^1.0.0", - "through2": "^2.0.0", - "xtend": "^4.0.0" - }, - "bin": { - "module-deps": "bin/cmd.js" + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" }, "engines": { - "node": ">= 0.8.0" + "node": ">=10" } }, - "node_modules/module-deps/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "node_modules/mocha/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "dev": true, - "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } + "license": "MIT" }, "node_modules/ms": { "version": "2.1.2", "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, - "node_modules/mute-stdout": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/mute-stdout/-/mute-stdout-1.0.1.tgz", - "integrity": "sha512-kDcwXR4PS7caBpuRYYBUz9iVixUk3anO3f5OYFiIPwK/20vCzKCHyKoulbiDY1S53zD2bxUpxN/IJ+TnXjfvxg==", "dev": true, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/mute-stream": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", - "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", - "dev": true + "license": "MIT" }, "node_modules/nanoid": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.1.tgz", "integrity": "sha512-n6Vs/3KGyxPQd6uO0eH4Bv0ojGSUvuLlIHtC3Y0kEO23YRge8H9x1GCzLn28YX0H66pMkxuaeESFq4tKISKwdw==", "dev": true, + "license": "MIT", "bin": { "nanoid": "bin/nanoid.cjs" }, @@ -9170,66 +5938,26 @@ "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" } }, - "node_modules/nanomatch": { - "version": "1.2.13", - "resolved": "https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz", - "integrity": "sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==", - "dev": true, - "dependencies": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "fragment-cache": "^0.2.1", - "is-windows": "^1.0.2", - "kind-of": "^6.0.2", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/nanomatch/node_modules/kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": 
"sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/natural-compare-lite": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz", "integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==", - "dev": true - }, - "node_modules/next-tick": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.1.0.tgz", - "integrity": "sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==", - "dev": true - }, - "node_modules/nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/nock": { "version": "13.3.0", "resolved": "https://registry.npmjs.org/nock/-/nock-13.3.0.tgz", "integrity": "sha512-HHqYQ6mBeiMc+N038w8LkMpDCRquCHWeNmN3v6645P3NhN2+qXOBqvPqo7Rt1VyCMzKhJ733wZqw5B7cQVFNPg==", "dev": true, + "license": "MIT", "dependencies": { "debug": "^4.1.0", "json-stringify-safe": "^5.0.1", @@ -9244,7 +5972,8 @@ "version": "2.0.10", "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.10.tgz", "integrity": "sha512-5GFldHPXVG/YZmFzJvKK2zDSzPKhEp0+ZR5SVaoSag9fsL5YgHbUHDfnG5494ISANDcK4KwPXAx2xqVEydmd7w==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/normalize-package-data": { "version": "2.5.0", @@ -9272,22 +6001,11 @@ 
"resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, - "node_modules/now-and-later": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/now-and-later/-/now-and-later-2.0.1.tgz", - "integrity": "sha512-KGvQ0cB70AQfg107Xvs/Fbu+dGmZoTRJp2TaPwcwQm3/7PteUyN2BCgk8KBMPGBUXZdVwyWS8fDCGFygBm19UQ==", - "dev": true, - "dependencies": { - "once": "^1.3.2" - }, - "engines": { - "node": ">= 0.10" - } - }, "node_modules/npm-run-path": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.1.0.tgz", @@ -9315,114 +6033,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/number-is-nan": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", - "integrity": "sha512-4jbtZXNAsfZbAHiiqjLPBiCl16dES1zI4Hpzzxw61Tk+loF+sBDBKx1ICKKKwIqQ7M0mFn1TmkN7euSncWgHiQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-copy": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/object-copy/-/object-copy-0.1.0.tgz", - "integrity": "sha512-79LYn6VAb63zgtmAteVOWo9Vdj71ZVBy3Pbse+VqxDpEP83XuujMrGqHIwAXJ5I/aM0zU7dIyIAhifVTPrNItQ==", - "dev": true, - "dependencies": { - "copy-descriptor": "^0.1.0", - "define-property": "^0.2.5", - "kind-of": "^3.0.3" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-copy/node_modules/define-property": { - "version": "0.2.5", - "resolved": 
"https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha512-Rr7ADjQZenceVOAKop6ALkkRAmH1A4Gx9hV/7ZujPUN2rkATqFO0JZLZInbAjpZYoJ1gUx8MRMQVkYemcbMSTA==", - "dev": true, - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-copy/node_modules/is-accessor-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", - "integrity": "sha512-e1BM1qnDbMRG3ll2U9dSK0UMHuWOs3pY3AtcFsmvwPtKL3MML/Q86i+GilLfvqEs4GW+ExB91tQ3Ig9noDIZ+A==", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-copy/node_modules/is-data-descriptor": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", - "integrity": "sha512-+w9D5ulSoBNlmw9OHn3U2v51SyoCd0he+bB3xMl62oijhrspxowjU+AIcDY0N3iEJbUEkB15IlMASQsxYigvXg==", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-copy/node_modules/is-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.6.tgz", - "integrity": "sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==", - "dev": true, - "dependencies": { - "is-accessor-descriptor": "^0.1.6", - "is-data-descriptor": "^0.1.4", - "kind-of": "^5.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-copy/node_modules/is-descriptor/node_modules/kind-of": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-5.1.0.tgz", - "integrity": "sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-copy/node_modules/kind-of": { - "version": "3.2.2", - 
"resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/object-inspect": { "version": "1.12.3", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz", "integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==", "dev": true, + "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -9436,18 +6052,6 @@ "node": ">= 0.4" } }, - "node_modules/object-visit": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/object-visit/-/object-visit-1.0.1.tgz", - "integrity": "sha512-GBaMwwAVK9qbQN3Scdo0OyvgPW7l3lnaVMj84uTOZlswkX0KpF6fyDBJhtTthf7pymztoN36/KEr1DyhF96zEA==", - "dev": true, - "dependencies": { - "isobject": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/object.assign": { "version": "4.1.4", "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", @@ -9466,57 +6070,21 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/object.defaults": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/object.defaults/-/object.defaults-1.1.0.tgz", - "integrity": "sha512-c/K0mw/F11k4dEUBMW8naXUuBuhxRCfG7W+yFy8EcijU/rSmazOUd1XAEEe6bC0OuXY4HUKjTJv7xbxIMqdxrA==", - "dev": true, - "dependencies": { - "array-each": "^1.0.1", - "array-slice": "^1.0.0", - "for-own": "^1.0.0", - "isobject": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object.map": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/object.map/-/object.map-1.0.1.tgz", - "integrity": "sha512-3+mAJu2PLfnSVGHwIWubpOFLscJANBKuB/6A4CxBstc4aqwQY0FWcsppuy4jU5GSB95yES5JHSI+33AWuS4k6w==", - "dev": true, - "dependencies": { - "for-own": "^1.0.0", - 
"make-iterator": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object.pick": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz", - "integrity": "sha512-tqa/UMy/CCoYmj+H5qc07qvSL9dqcs/WZENZ1JbtWBlATP+iVOe778gE6MSijnyCnORzDuX6hU+LA4SZ09YjFQ==", + "node_modules/object.values": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.6.tgz", + "integrity": "sha512-FVVTkD1vENCsAcwNs9k6jea2uHC/X0+JcjG8YA60FN5CMaJmG95wT9jek/xX9nornqGRrBkKtzuAu2wuHpKqvw==", "dev": true, "dependencies": { - "isobject": "^3.0.1" + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object.reduce": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/object.reduce/-/object.reduce-1.0.1.tgz", - "integrity": "sha512-naLhxxpUESbNkRqc35oQ2scZSJueHGQNUfMW/0U37IgN6tE2dgDWg3whf+NEliy3F/QysrO48XKUz/nGPe+AQw==", - "dev": true, - "dependencies": { - "for-own": "^1.0.0", - "make-iterator": "^1.0.0" + "node": ">= 0.4" }, - "engines": { - "node": ">=0.10.0" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/once": { @@ -9524,6 +6092,7 @@ "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", "dev": true, + "license": "ISC", "dependencies": { "wrappy": "1" } @@ -9543,11 +6112,29 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/open": { + "version": "8.4.2", + "resolved": "https://registry.npmjs.org/open/-/open-8.4.2.tgz", + "integrity": "sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==", + "dev": true, + "dependencies": { + "define-lazy-prop": "^2.0.0", + "is-docker": "^2.1.1", + "is-wsl": "^2.2.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, "node_modules/optionator": { "version": "0.9.1", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", "dev": true, + "license": "MIT", "dependencies": { "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", @@ -9564,49 +6151,15 @@ "version": "2.0.6", "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", - "dev": true - }, - "node_modules/ordered-read-streams": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/ordered-read-streams/-/ordered-read-streams-1.0.1.tgz", - "integrity": "sha512-Z87aSjx3r5c0ZB7bcJqIgIRX5bxR7A4aSzvIbaxd0oTkWBCOoKfuGHiKj60CHVUgg1Phm5yMZzBdt8XqRs73Mw==", - "dev": true, - "dependencies": { - "readable-stream": "^2.0.1" - } - }, - "node_modules/os-browserify": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/os-browserify/-/os-browserify-0.3.0.tgz", - "integrity": "sha512-gjcpUc3clBf9+210TRaDWbf+rZZZEshZ+DlXMRCeAjp0xhTrnQsKHypIy1J3d5hKdUzj69t708EHtU8P6bUn0A==", - "dev": true - }, - "node_modules/os-locale": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-1.4.0.tgz", - "integrity": "sha512-PRT7ZORmwu2MEFt4/fv3Q+mEfN4zetKxufQrkShY2oGvUms9r8otu5HfdyIFHkYXjO7laNsoVGmM2MANfuTA8g==", "dev": true, - "dependencies": { - "lcid": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/os-tmpdir": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", - "integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } + "license": "MIT" }, "node_modules/p-limit": { "version": "3.1.0", "resolved": 
"https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "dev": true, + "license": "MIT", "dependencies": { "yocto-queue": "^0.1.0" }, @@ -9622,6 +6175,7 @@ "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "dev": true, + "license": "MIT", "dependencies": { "p-limit": "^3.0.2" }, @@ -9647,17 +6201,21 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/pako": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", - "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", - "dev": true + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "engines": { + "node": ">=6" + } }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", "dev": true, + "license": "MIT", "dependencies": { "callsites": "^3.0.0" }, @@ -9665,97 +6223,30 @@ "node": ">=6" } }, - "node_modules/parents": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parents/-/parents-1.0.1.tgz", - "integrity": "sha512-mXKF3xkoUt5td2DoxpLmtOmZvko9VfFpwRwkKDHSNvgmpLAeBo18YDhcPbBzJq+QLCHMbGOfzia2cX4U+0v9Mg==", - "dev": true, - "dependencies": { - "path-platform": "~0.11.15" - } - }, - "node_modules/parse-asn1": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.6.tgz", - "integrity": 
"sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw==", - "dependencies": { - "asn1.js": "^5.2.0", - "browserify-aes": "^1.0.0", - "evp_bytestokey": "^1.0.0", - "pbkdf2": "^3.0.3", - "safe-buffer": "^5.1.1" - } - }, - "node_modules/parse-filepath": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/parse-filepath/-/parse-filepath-1.0.2.tgz", - "integrity": "sha512-FwdRXKCohSVeXqwtYonZTXtbGJKrn+HNyWDYVcp5yuJlesTwNH4rsmRZ+GrKAPJ5bLpRxESMeS+Rl0VCHRvB2Q==", - "dev": true, - "dependencies": { - "is-absolute": "^1.0.0", - "map-cache": "^0.2.0", - "path-root": "^0.1.1" - }, - "engines": { - "node": ">=0.8" - } - }, "node_modules/parse-json": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", - "integrity": "sha512-QR/GGaKCkhwk1ePQNYDRKYZ3mwU9ypsKhB0XyFnLQdomyEqk3e8wpW3V5Jp88zbxK4n5ST1nqo+g9juTpownhQ==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", "dev": true, "dependencies": { - "error-ex": "^1.2.0" + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/parse-node-version": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parse-node-version/-/parse-node-version-1.0.1.tgz", - "integrity": "sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA==", - "dev": true, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/parse-passwd": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/parse-passwd/-/parse-passwd-1.0.0.tgz", - "integrity": "sha512-1Y1A//QUXEZK7YKz+rD9WydcE1+EuPr6ZBgKecAB8tmoW6UFv0NREVJe1p+jRxtThkcbbKkfwIbWJe/IeE6m2Q==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } 
- }, - "node_modules/pascalcase": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/pascalcase/-/pascalcase-0.1.1.tgz", - "integrity": "sha512-XHXfu/yOQRy9vYOtUDVMN60OEJjW013GoObG1o+xwQTpB9eYJX/BjXMsdW13ZDPruFhYYn0AG22w0xgQMwl3Nw==", - "dev": true, - "engines": { - "node": ">=0.10.0" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/path-browserify": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-0.0.1.tgz", - "integrity": "sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ==", - "dev": true - }, - "node_modules/path-dirname": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/path-dirname/-/path-dirname-1.0.2.tgz", - "integrity": "sha512-ALzNPpyNq9AqXMBjeymIjFDAkAFH06mHJH/cSBHAgU0s4vfpBn6b2nf8tiRLvagKD8RbTpq2FKTBg7cl9l3c7Q==", - "dev": true - }, "node_modules/path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -9765,6 +6256,7 @@ "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } @@ -9774,6 +6266,7 @@ "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -9782,43 +6275,15 @@ "version": "1.0.7", "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", "integrity": 
"sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", - "dev": true - }, - "node_modules/path-platform": { - "version": "0.11.15", - "resolved": "https://registry.npmjs.org/path-platform/-/path-platform-0.11.15.tgz", - "integrity": "sha512-Y30dB6rab1A/nfEKsZxmr01nUotHX0c/ZiIAsCTatEe1CmS5Pm5He7fZ195bPT7RdquoaL8lLxFCMQi/bS7IJg==", "dev": true, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/path-root": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/path-root/-/path-root-0.1.1.tgz", - "integrity": "sha512-QLcPegTHF11axjfojBIoDygmS2E3Lf+8+jI6wOVmNVenrKSo3mFdSGiIgdSHenczw3wPtlVMQaFVwGmM7BJdtg==", - "dev": true, - "dependencies": { - "path-root-regex": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-root-regex": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/path-root-regex/-/path-root-regex-0.1.2.tgz", - "integrity": "sha512-4GlJ6rZDhQZFE0DPVKh0e9jmZ5egZfxTkp7bcRDuPlJXbAwhxcl2dINPUAsjLdejqaLsCeg8axcLjIbvBjN4pQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } + "license": "MIT" }, "node_modules/path-type": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -9828,36 +6293,24 @@ "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", "dev": true, + "license": "MIT", "engines": { "node": "*" } }, - "node_modules/pbkdf2": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz", - "integrity": "sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==", - "dependencies": { - "create-hash": "^1.1.2", - "create-hmac": "^1.1.4", 
- "ripemd160": "^2.0.1", - "safe-buffer": "^5.0.1", - "sha.js": "^2.4.8" - }, - "engines": { - "node": ">=0.12" - } - }, "node_modules/picocolors": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "dev": true, + "license": "MIT", "engines": { "node": ">=8.6" }, @@ -9878,57 +6331,103 @@ } }, "node_modules/pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", "dev": true, "engines": { - "node": ">=0.10.0" + "node": ">=6" } }, - "node_modules/pinkie": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", - "integrity": "sha512-MnUuEycAemtSaeFSjXKW/aroV7akBbY+Sv+RkyqFjgAe73F+MR0TBWKBRDkmfWq/HiFmdavfZ1G7h4SPZXaCSg==", + "node_modules/pirates": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.5.tgz", + "integrity": "sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==", "dev": true, "engines": { - "node": ">=0.10.0" + "node": ">= 6" } }, - "node_modules/pinkie-promise": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", - "integrity": 
"sha512-0Gni6D4UcLTbv9c57DfxDGdr41XfgUjqWZu492f0cIGr16zDU06BWP/RAEvOuo7CQ0CNjHaLlM59YJJFm3NWlw==", + "node_modules/pkg-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", + "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", "dev": true, "dependencies": { - "pinkie": "^2.0.0" + "find-up": "^3.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=6" } }, - "node_modules/plugin-error": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/plugin-error/-/plugin-error-1.0.1.tgz", - "integrity": "sha512-L1zP0dk7vGweZME2i+EeakvUNqSrdiI3F91TwEoYiGrAfUXmVv6fJIq4g82PAXxNsWOp0J7ZqQy/3Szz0ajTxA==", + "node_modules/pkg-dir/node_modules/find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", "dev": true, "dependencies": { - "ansi-colors": "^1.0.1", - "arr-diff": "^4.0.0", - "arr-union": "^3.1.0", - "extend-shallow": "^3.0.2" + "locate-path": "^3.0.0" }, "engines": { - "node": ">= 0.10" + "node": ">=6" } }, - "node_modules/posix-character-classes": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz", - "integrity": "sha512-xTgYBc3fuo7Yt7JbiuFxSYGToMoz8fLoE6TC9Wx1P/u+LfeThMOAqmuyECnlBaaJb+u1m9hHiXUEtwW4OzfUJg==", + "node_modules/pkg-dir/node_modules/locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", "dev": true, + "dependencies": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + }, "engines": { - "node": ">=0.10.0" + "node": ">=6" + } + }, + "node_modules/pkg-dir/node_modules/p-limit": { + "version": "2.3.0", + "resolved": 
"https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-dir/node_modules/p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "dependencies": { + "p-limit": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/pkg-dir/node_modules/path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/pluralize": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/pluralize/-/pluralize-8.0.0.tgz", + "integrity": "sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA==", + "dev": true, + "engines": { + "node": ">=4" } }, "node_modules/prelude-ls": { @@ -9936,6 +6435,7 @@ "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.8.0" } @@ -9945,6 +6445,7 @@ "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.7.tgz", "integrity": "sha512-yPngTo3aXUUmyuTjeTUT75txrf+aMh9FiD7q9ZE/i6r0bPb22g4FsE6Y338PQX1bmfy08i9QQCB7/rcUAVntfw==", "dev": true, + "license": "MIT", "bin": { "prettier": "bin-prettier.js" }, @@ -9955,105 +6456,16 @@ "url": "https://github.com/prettier/prettier?sponsor=1" } }, - 
"node_modules/pretty-hrtime": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/pretty-hrtime/-/pretty-hrtime-1.0.3.tgz", - "integrity": "sha512-66hKPCr+72mlfiSjlEB1+45IjXSqvVAIy6mocupoww4tBFE9R9IhwwUGoI4G++Tc9Aq+2rxOt0RFU6gPcrte0A==", - "dev": true, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/process": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", - "dev": true, - "engines": { - "node": ">= 0.6.0" - } - }, - "node_modules/process-nextick-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", - "dev": true - }, - "node_modules/progress": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", - "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", - "dev": true, - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/propagate": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/propagate/-/propagate-2.0.1.tgz", "integrity": "sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==", "dev": true, + "license": "MIT", "engines": { "node": ">= 8" } }, - "node_modules/public-encrypt": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/public-encrypt/-/public-encrypt-4.0.3.tgz", - "integrity": "sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q==", - "dependencies": { - "bn.js": "^4.1.0", - "browserify-rsa": "^4.0.0", - "create-hash": "^1.1.0", - "parse-asn1": "^5.0.0", - "randombytes": "^2.0.1", - "safe-buffer": "^5.1.2" - } - }, - "node_modules/public-encrypt/node_modules/bn.js": { 
- "version": "4.12.0", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", - "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" - }, - "node_modules/pump": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/pump/-/pump-2.0.1.tgz", - "integrity": "sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA==", - "dev": true, - "dependencies": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, - "node_modules/pumpify": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/pumpify/-/pumpify-1.5.1.tgz", - "integrity": "sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ==", - "dev": true, - "dependencies": { - "duplexify": "^3.6.0", - "inherits": "^2.0.3", - "pump": "^2.0.0" - } - }, - "node_modules/pumpify/node_modules/duplexify": { - "version": "3.7.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz", - "integrity": "sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g==", - "dev": true, - "dependencies": { - "end-of-stream": "^1.0.0", - "inherits": "^2.0.1", - "readable-stream": "^2.0.0", - "stream-shift": "^1.0.0" - } - }, - "node_modules/punycode": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==", - "dev": true - }, "node_modules/qs": { "version": "6.11.1", "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.1.tgz", @@ -10073,6 +6485,7 @@ "version": "7.1.3", "resolved": "https://registry.npmjs.org/query-string/-/query-string-7.1.3.tgz", "integrity": "sha512-hh2WYhq4fi8+b+/2Kg9CEge4fDPvHS534aOOvOZeQ3+Vf2mCFsaFBYj0i+iXcAq6I9Vzp5fjMFBlONvayDC1qg==", + "license": "MIT", "dependencies": { "decode-uri-component": "^0.2.2", "filter-obj": "^1.1.0", @@ -10086,25 
+6499,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/querystring": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", - "integrity": "sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==", - "deprecated": "The querystring API is considered Legacy. new code should use the URLSearchParams API instead.", - "dev": true, - "engines": { - "node": ">=0.4.x" - } - }, - "node_modules/querystring-es3": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/querystring-es3/-/querystring-es3-0.2.1.tgz", - "integrity": "sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==", - "dev": true, - "engines": { - "node": ">=0.4.x" - } - }, "node_modules/queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -10123,128 +6517,132 @@ "type": "consulting", "url": "https://feross.org/support" } - ] + ], + "license": "MIT" }, "node_modules/randombytes": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", - "dependencies": { - "safe-buffer": "^5.1.0" - } - }, - "node_modules/randomfill": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/randomfill/-/randomfill-1.0.4.tgz", - "integrity": "sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw==", - "dependencies": { - "randombytes": "^2.0.5", - "safe-buffer": "^5.1.0" - } - }, - "node_modules/read-only-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-only-stream/-/read-only-stream-2.0.0.tgz", - "integrity": "sha512-3ALe0bjBVZtkdWKIcThYpQCLbBMd/+Tbh2CDSrAIDO3UsZ4Xs+tnyjv2MjCOMMgBG+AsUOeuP1cgtY1INISc8w==", "dev": true, + "license": "MIT", "dependencies": { 
- "readable-stream": "^2.0.2" + "safe-buffer": "^5.1.0" } }, "node_modules/read-pkg": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-1.1.0.tgz", - "integrity": "sha512-7BGwRHqt4s/uVbuyoeejRn4YmFnYZiFl4AuaeXHlgZf3sONF0SOGlxs2Pw8g6hCKupo08RafIO5YXFNOKTfwsQ==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", + "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==", "dev": true, "dependencies": { - "load-json-file": "^1.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^1.0.0" + "@types/normalize-package-data": "^2.4.0", + "normalize-package-data": "^2.5.0", + "parse-json": "^5.0.0", + "type-fest": "^0.6.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/read-pkg-up": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-1.0.1.tgz", - "integrity": "sha512-WD9MTlNtI55IwYUS27iHh9tK3YoIVhxis8yKhLpTqWtml739uXc9NWTpxoHkfZf3+DkCCsXox94/VWZniuZm6A==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz", + "integrity": "sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==", "dev": true, "dependencies": { - "find-up": "^1.0.0", - "read-pkg": "^1.0.0" + "find-up": "^4.1.0", + "read-pkg": "^5.2.0", + "type-fest": "^0.8.1" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/read-pkg-up/node_modules/find-up": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz", - "integrity": "sha512-jvElSjyuo4EMQGoTwo1uJU5pQMwTW5lS1x05zzfJuTIyLR3zwO27LYrxNg+dlvKpGOuGy/MzBdXh80g0ve5+HA==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": 
"sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "dev": true, "dependencies": { - "path-exists": "^2.0.0", - "pinkie-promise": "^2.0.0" + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node_modules/read-pkg-up/node_modules/path-exists": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz", - "integrity": "sha512-yTltuKuhtNeFJKa1PiRzfLAU5182q1y4Eb4XCJ3PBqyzEDkAZRzBrKKBct682ls9reBVHf9udYLN5Nd+K1B9BQ==", + "node_modules/read-pkg-up/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "dev": true, "dependencies": { - "pinkie-promise": "^2.0.0" + "p-locate": "^4.1.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node_modules/read-pkg/node_modules/path-type": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-1.1.0.tgz", - "integrity": "sha512-S4eENJz1pkiQn9Znv33Q+deTOKmbl+jj1Fl+qiP/vYezj+S8x+J3Uo0ISrx/QoEvIlOaDWJhPaRd1flJ9HXZqg==", + "node_modules/read-pkg-up/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "dev": true, "dependencies": { - "graceful-fs": "^4.1.2", - "pify": "^2.0.0", - "pinkie-promise": "^2.0.0" + "p-try": "^2.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/readable-stream": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", - "integrity": 
"sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "node_modules/read-pkg-up/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "dev": true, "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" } }, - "node_modules/readable-stream/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true + "node_modules/read-pkg-up/node_modules/type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "dev": true, + "engines": { + "node": ">=8" + } }, - "node_modules/readable-stream/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "node_modules/read-pkg/node_modules/type-fest": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", + "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==", "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": 
"https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", "dependencies": { - "safe-buffer": "~5.1.0" + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" } }, "node_modules/readdirp": { @@ -10252,6 +6650,7 @@ "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", "dev": true, + "license": "MIT", "dependencies": { "picomatch": "^2.2.1" }, @@ -10259,29 +6658,19 @@ "node": ">=8.10.0" } }, - "node_modules/rechoir": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", - "integrity": "sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==", - "dev": true, - "dependencies": { - "resolve": "^1.1.6" - }, - "engines": { - "node": ">= 0.10" - } - }, "node_modules/regenerate": { "version": "1.4.2", "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz", "integrity": "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/regenerate-unicode-properties": { "version": "10.1.0", "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.0.tgz", "integrity": "sha512-d1VudCLoIGitcU/hEg2QqvyGZQmdC0Lf8BqdOMXGFSvJP4bNV1+XqbPQeHHLD51Jh4QJJ225dlIFvY4Ly6MXmQ==", "dev": true, + "license": "MIT", "dependencies": { "regenerate": "^1.4.2" }, @@ -10293,37 +6682,43 @@ "version": "0.13.11", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz", "integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==", - "dev": true + "dev": 
true, + "license": "MIT" }, "node_modules/regenerator-transform": { "version": "0.15.1", "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.1.tgz", "integrity": "sha512-knzmNAcuyxV+gQCufkYcvOqX/qIIfHLv0u5x79kRxuGojfYVky1f15TzZEu2Avte8QGepvUNTnLskf8E6X6Vyg==", "dev": true, + "license": "MIT", "dependencies": { "@babel/runtime": "^7.8.4" } }, - "node_modules/regex-not": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/regex-not/-/regex-not-1.0.2.tgz", - "integrity": "sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==", + "node_modules/regexp-tree": { + "version": "0.1.24", + "resolved": "https://registry.npmjs.org/regexp-tree/-/regexp-tree-0.1.24.tgz", + "integrity": "sha512-s2aEVuLhvnVJW6s/iPgEGK6R+/xngd2jNQ+xy4bXNDKxZKJH6jpPHY6kVeVv1IeLCHgswRj+Kl3ELaDjG6V1iw==", "dev": true, - "dependencies": { - "extend-shallow": "^3.0.2", - "safe-regex": "^1.1.0" - }, - "engines": { - "node": ">=0.10.0" + "bin": { + "regexp-tree": "bin/regexp-tree" } }, - "node_modules/regexpp": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", - "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==", + "node_modules/regexp.prototype.flags": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", + "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==", "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "functions-have-names": "^1.2.2" + }, "engines": { - "node": ">=6.5.0" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/regexpu-core": { @@ -10331,6 +6726,7 @@ "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-5.3.2.tgz", "integrity": 
"sha512-RAM5FlZz+Lhmo7db9L298p2vHP5ZywrVXmVXpmAD9GuL5MPH6t9ROw1iA/wfHkQ76Qe7AaPF0nGuim96/IrQMQ==", "dev": true, + "license": "MIT", "dependencies": { "@babel/regjsgen": "^0.8.0", "regenerate": "^1.4.2", @@ -10348,104 +6744,21 @@ "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.9.1.tgz", "integrity": "sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "jsesc": "~0.5.0" }, - "bin": { - "regjsparser": "bin/parser" - } - }, - "node_modules/regjsparser/node_modules/jsesc": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", - "integrity": "sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==", - "dev": true, - "bin": { - "jsesc": "bin/jsesc" - } - }, - "node_modules/remove-bom-buffer": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/remove-bom-buffer/-/remove-bom-buffer-3.0.0.tgz", - "integrity": "sha512-8v2rWhaakv18qcvNeli2mZ/TMTL2nEyAKRvzo1WtnZBl15SHyEhrCu2/xKlJyUFKHiHgfXIyuY6g2dObJJycXQ==", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5", - "is-utf8": "^0.2.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/remove-bom-stream": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/remove-bom-stream/-/remove-bom-stream-1.2.0.tgz", - "integrity": "sha512-wigO8/O08XHb8YPzpDDT+QmRANfW6vLqxfaXm1YXhnFf3AkSLyjfG3GEFg4McZkmgL7KvCj5u2KczkvSP6NfHA==", - "dev": true, - "dependencies": { - "remove-bom-buffer": "^3.0.0", - "safe-buffer": "^5.1.0", - "through2": "^2.0.3" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/remove-bom-stream/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dev": true, - "dependencies": { 
- "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } - }, - "node_modules/remove-trailing-separator": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz", - "integrity": "sha512-/hS+Y0u3aOfIETiaiirUFwDBDzmXPvO+jAfKTitUngIPzdKc6Z0LoFjM/CK5PL4C+eKwHohlHAb6H0VFfmmUsw==", - "dev": true - }, - "node_modules/repeat-element": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.4.tgz", - "integrity": "sha512-LFiNfRcSu7KK3evMyYOuCzv3L10TW7yC1G2/+StMjK8Y6Vqd2MG7r/Qjw4ghtuCOjFvlnms/iMmLqpvW/ES/WQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/repeat-string": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", - "integrity": "sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==", - "dev": true, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/replace-ext": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/replace-ext/-/replace-ext-1.0.1.tgz", - "integrity": "sha512-yD5BHCe7quCgBph4rMQ+0KkIRKwWCrHDOX1p1Gp6HwjPM5kVoCdKGNhN7ydqqsX6lJEnQDKZ/tFMiEdQ1dvPEw==", - "dev": true, - "engines": { - "node": ">= 0.10" + "bin": { + "regjsparser": "bin/parser" } }, - "node_modules/replace-homedir": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/replace-homedir/-/replace-homedir-1.0.0.tgz", - "integrity": "sha512-CHPV/GAglbIB1tnQgaiysb8H2yCy8WQ7lcEwQ/eT+kLj0QHV8LnJW0zpqpE7RSkrMSRoa+EBoag86clf7WAgSg==", + "node_modules/regjsparser/node_modules/jsesc": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", + "integrity": "sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==", "dev": true, - "dependencies": { - "homedir-polyfill": "^1.0.1", - "is-absolute": "^1.0.0", - "remove-trailing-separator": "^1.1.0" - }, - 
"engines": { - "node": ">= 0.10" + "bin": { + "jsesc": "bin/jsesc" } }, "node_modules/require-directory": { @@ -10453,21 +6766,17 @@ "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, - "node_modules/require-main-filename": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-1.0.1.tgz", - "integrity": "sha512-IqSUtOVP4ksd1C/ej5zeEh/BIP2ajqpn8c5x+q99gvcIG/Qf0cud5raVnE/Dwd0ua9TXYDoDc0RE5hBSdz22Ug==", - "dev": true - }, "node_modules/resolve": { "version": "1.22.2", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.2.tgz", "integrity": "sha512-Sb+mjNHOULsBv818T40qSPeRiuWLyaGMa5ewydRLFimneixmVy2zdivRl+AF6jaYPC8ERxGDmFSiqui6SfPd+g==", "dev": true, + "license": "MIT", "dependencies": { "is-core-module": "^2.11.0", "path-parse": "^1.0.7", @@ -10480,47 +6789,16 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/resolve-dir": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/resolve-dir/-/resolve-dir-1.0.1.tgz", - "integrity": "sha512-R7uiTjECzvOsWSfdM0QKFNBVFcK27aHOUwdvK53BcW8zqnGdYp0Fbj82cy54+2A4P2tFM22J5kRfe1R+lM/1yg==", - "dev": true, - "dependencies": { - "expand-tilde": "^2.0.0", - "global-modules": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/resolve-from": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } }, - "node_modules/resolve-options": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/resolve-options/-/resolve-options-1.1.0.tgz", - "integrity": 
"sha512-NYDgziiroVeDC29xq7bp/CacZERYsA9bXYd1ZmcJlF3BcrZv5pTb4NG7SjdyKDnXZ84aC4vo2u6sNKIA1LCu/A==", - "dev": true, - "dependencies": { - "value-or-function": "^3.0.0" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/resolve-url": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz", - "integrity": "sha512-ZuF55hVUQaaczgOIwqWzkEcEidmlD/xl44x1UZnhOXcYuFN2S6+rcxpG+C1N3So0wvNI3DmJICUFfu2SxhBmvg==", - "deprecated": "https://github.com/lydell/resolve-url#deprecated", - "dev": true - }, "node_modules/restore-cursor": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", @@ -10558,20 +6836,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/ret": { - "version": "0.1.15", - "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", - "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==", - "dev": true, - "engines": { - "node": ">=0.12" - } - }, "node_modules/reusify": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", "dev": true, + "license": "MIT", "engines": { "iojs": ">=1.0.0", "node": ">=0.10.0" @@ -10583,39 +6853,6 @@ "integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==", "dev": true }, - "node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "dev": true, - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/ripemd160": { - "version": "2.0.2", - "resolved": 
"https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.2.tgz", - "integrity": "sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==", - "dependencies": { - "hash-base": "^3.0.0", - "inherits": "^2.0.1" - } - }, - "node_modules/run-async": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz", - "integrity": "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==", - "dev": true, - "engines": { - "node": ">=0.12.0" - } - }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -10635,6 +6872,7 @@ "url": "https://feross.org/support" } ], + "license": "MIT", "dependencies": { "queue-microtask": "^1.2.2" } @@ -10669,437 +6907,150 @@ }, { "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/safe-regex": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", - "integrity": "sha512-aJXcif4xnaNUzvUuC5gcb46oTS7zvg4jpMTnuqtrEPlR3vFr4pxtdTwaF1Qs3Enjn9HK+ZlwQui+a7z0SywIzg==", - "dev": true, - "dependencies": { - "ret": "~0.1.10" - } - }, - "node_modules/safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" - }, - "node_modules/sax": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", - "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" - }, - "node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "dev": true, - "bin": { - "semver": 
"bin/semver.js" - } - }, - "node_modules/semver-greatest-satisfied-range": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/semver-greatest-satisfied-range/-/semver-greatest-satisfied-range-1.1.0.tgz", - "integrity": "sha512-Ny/iyOzSSa8M5ML46IAx3iXc6tfOsYU2R4AXi2UpHk60Zrgyq6eqPj/xiOfS0rRl/iiQ/rdJkVjw/5cdUyCntQ==", - "dev": true, - "dependencies": { - "sver-compat": "^1.5.0" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/serialize-javascript": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", - "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", - "dev": true, - "dependencies": { - "randombytes": "^2.1.0" - } - }, - "node_modules/set-blocking": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", - "dev": true - }, - "node_modules/set-value": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz", - "integrity": "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==", - "dev": true, - "dependencies": { - "extend-shallow": "^2.0.1", - "is-extendable": "^0.1.1", - "is-plain-object": "^2.0.3", - "split-string": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/set-value/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/set-value/node_modules/is-extendable": { - "version": "0.1.1", - "resolved": 
"https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/set-value/node_modules/is-plain-object": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", - "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", - "dev": true, - "dependencies": { - "isobject": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/sha.js": { - "version": "2.4.11", - "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", - "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", - "dependencies": { - "inherits": "^2.0.1", - "safe-buffer": "^5.0.1" - }, - "bin": { - "sha.js": "bin.js" - } - }, - "node_modules/shasum": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/shasum/-/shasum-1.0.2.tgz", - "integrity": "sha512-UTzHm/+AzKfO9RgPgRpDIuMSNie1ubXRaljjlhFMNGYoG7z+rm9AHLPMf70R7887xboDH9Q+5YQbWKObFHEAtw==", - "dev": true, - "dependencies": { - "json-stable-stringify": "~0.0.0", - "sha.js": "~2.4.4" - } - }, - "node_modules/shasum-object": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shasum-object/-/shasum-object-1.0.0.tgz", - "integrity": "sha512-Iqo5rp/3xVi6M4YheapzZhhGPVs0yZwHj7wvwQ1B9z8H6zk+FEnI7y3Teq7qwnekfEhu8WmG2z0z4iWZaxLWVg==", - "dev": true, - "dependencies": { - "fast-safe-stringify": "^2.0.7" - } - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - 
"node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/shell-quote": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.1.tgz", - "integrity": "sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "dev": true - }, - "node_modules/simple-concat": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", - "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/slash": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/slice-ansi": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-5.0.0.tgz", - "integrity": "sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ==", - "dev": true, - "dependencies": { - "ansi-styles": "^6.0.0", - "is-fullwidth-code-point": "^4.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/slice-ansi?sponsor=1" - } - }, - "node_modules/slice-ansi/node_modules/ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/snapdragon": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz", - "integrity": "sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==", - "dev": true, - "dependencies": { - "base": "^0.11.1", - "debug": "^2.2.0", - "define-property": "^0.2.5", - "extend-shallow": "^2.0.1", - "map-cache": "^0.2.2", - "source-map": "^0.5.6", - "source-map-resolve": "^0.5.0", - "use": "^3.1.0" - }, - "engines": { - "node": ">=0.10.0" - } + "url": "https://feross.org/support" + } + ], + "license": "MIT" }, - "node_modules/snapdragon-node": { + "node_modules/safe-regex": { "version": "2.1.1", - "resolved": "https://registry.npmjs.org/snapdragon-node/-/snapdragon-node-2.1.1.tgz", - "integrity": "sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==", + "resolved": 
"https://registry.npmjs.org/safe-regex/-/safe-regex-2.1.1.tgz", + "integrity": "sha512-rx+x8AMzKb5Q5lQ95Zoi6ZbJqwCLkqi3XuJXp5P3rT8OEc6sZCJG5AE5dU3lsgRr/F4Bs31jSlVN+j5KrsGu9A==", "dev": true, "dependencies": { - "define-property": "^1.0.0", - "isobject": "^3.0.0", - "snapdragon-util": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" + "regexp-tree": "~0.1.1" } }, - "node_modules/snapdragon-node/node_modules/define-property": { + "node_modules/safe-regex-test": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha512-cZTYKFWspt9jZsMscWo8sc/5lbPC9Q0N5nBLgb+Yd915iL3udB1uFgS3B8YCx66UVHq018DAVFoee7x+gxggeA==", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", + "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", "dev": true, "dependencies": { - "is-descriptor": "^1.0.0" + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.3", + "is-regex": "^1.1.4" }, - "engines": { - "node": ">=0.10.0" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/snapdragon-util": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/snapdragon-util/-/snapdragon-util-3.0.1.tgz", - "integrity": "sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==", - "dev": true, - "dependencies": { - "kind-of": "^3.2.0" - }, - "engines": { - "node": ">=0.10.0" - } + "node_modules/sax": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", + "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==", + "license": "ISC" }, - "node_modules/snapdragon-util/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": 
"sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" + "bin": { + "semver": "bin/semver.js" } }, - "node_modules/snapdragon/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "node_modules/serialize-javascript": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", + "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", "dev": true, + "license": "BSD-3-Clause", "dependencies": { - "ms": "2.0.0" + "randombytes": "^2.1.0" } }, - "node_modules/snapdragon/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha512-Rr7ADjQZenceVOAKop6ALkkRAmH1A4Gx9hV/7ZujPUN2rkATqFO0JZLZInbAjpZYoJ1gUx8MRMQVkYemcbMSTA==", + "node_modules/shallow-clone": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", + "integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==", "dev": true, "dependencies": { - "is-descriptor": "^0.1.0" + "kind-of": "^6.0.2" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node_modules/snapdragon/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": 
"sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", "dev": true, + "license": "MIT", "dependencies": { - "is-extendable": "^0.1.0" + "shebang-regex": "^3.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node_modules/snapdragon/node_modules/is-accessor-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", - "integrity": "sha512-e1BM1qnDbMRG3ll2U9dSK0UMHuWOs3pY3AtcFsmvwPtKL3MML/Q86i+GilLfvqEs4GW+ExB91tQ3Ig9noDIZ+A==", + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, + "license": "MIT", "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node_modules/snapdragon/node_modules/is-accessor-descriptor/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "node_modules/side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", "dev": true, "dependencies": { - "is-buffer": "^1.1.5" + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" }, - "engines": { - "node": ">=0.10.0" + "funding": { + "url": 
"https://github.com/sponsors/ljharb" } }, - "node_modules/snapdragon/node_modules/is-data-descriptor": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", - "integrity": "sha512-+w9D5ulSoBNlmw9OHn3U2v51SyoCd0he+bB3xMl62oijhrspxowjU+AIcDY0N3iEJbUEkB15IlMASQsxYigvXg==", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true }, - "node_modules/snapdragon/node_modules/is-data-descriptor/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, + "license": "MIT", "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node_modules/snapdragon/node_modules/is-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.6.tgz", - "integrity": "sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==", + "node_modules/slice-ansi": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-5.0.0.tgz", + "integrity": "sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ==", "dev": true, "dependencies": { - "is-accessor-descriptor": "^0.1.6", - "is-data-descriptor": "^0.1.4", - "kind-of": 
"^5.0.0" + "ansi-styles": "^6.0.0", + "is-fullwidth-code-point": "^4.0.0" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon/node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", - "dev": true, - "engines": { - "node": ">=0.10.0" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" } }, - "node_modules/snapdragon/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true - }, - "node_modules/snapdragon/node_modules/source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==", + "node_modules/slice-ansi/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", "dev": true, "engines": { - "node": ">=0.10.0" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, "node_modules/source-map": { @@ -11107,50 +7058,22 @@ "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", "dev": true, + "license": "BSD-3-Clause", "engines": { "node": ">=0.10.0" } }, - "node_modules/source-map-resolve": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.3.tgz", - "integrity": 
"sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==", - "deprecated": "See https://github.com/lydell/source-map-resolve#deprecated", - "dev": true, - "dependencies": { - "atob": "^2.1.2", - "decode-uri-component": "^0.2.0", - "resolve-url": "^0.2.1", - "source-map-url": "^0.4.0", - "urix": "^0.1.0" - } - }, "node_modules/source-map-support": { "version": "0.5.21", "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", "dev": true, + "license": "MIT", "dependencies": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" } }, - "node_modules/source-map-url": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.1.tgz", - "integrity": "sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw==", - "deprecated": "See https://github.com/lydell/source-map-url#deprecated", - "dev": true - }, - "node_modules/sparkles": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/sparkles/-/sparkles-1.0.1.tgz", - "integrity": "sha512-dSO0DDYUahUt/0/pD/Is3VIm5TGJjludZ0HVymmhYF6eNA53PVLhnUk0znSYbH8IYBuJdCE+1luR22jNLMaQdw==", - "dev": true, - "engines": { - "node": ">= 0.10" - } - }, "node_modules/spdx-correct": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", @@ -11173,222 +7096,43 @@ "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", "dev": true, "dependencies": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-license-ids": { - "version": "3.0.13", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.13.tgz", - "integrity": 
"sha512-XkD+zwiqXHikFZm4AX/7JSCXA98U5Db4AFd5XUg/+9UNtnH75+Z9KxtpYiJZx36mUDVOwH83pl7yvCer6ewM3w==", - "dev": true - }, - "node_modules/split-file": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/split-file/-/split-file-2.3.0.tgz", - "integrity": "sha512-dc/0SDKvjtSjUI999vkclWQAk5xhD86pKEWWL2ULR6WrHI9/euIEMG/JSUbwbNW8IC+gYLJqynSGHwlOVmSwGA==", - "dev": true, - "dependencies": { - "bluebird": "^3.7.2" - }, - "bin": { - "split-file": "split-file-cli.js" - } - }, - "node_modules/split-on-first": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/split-on-first/-/split-on-first-1.1.0.tgz", - "integrity": "sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw==", - "engines": { - "node": ">=6" - } - }, - "node_modules/split-string": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/split-string/-/split-string-3.1.0.tgz", - "integrity": "sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==", - "dev": true, - "dependencies": { - "extend-shallow": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", - "dev": true - }, - "node_modules/stack-trace": { - "version": "0.0.10", - "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", - "integrity": "sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg==", - "dev": true, - "engines": { - "node": "*" - } - }, - "node_modules/static-extend": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/static-extend/-/static-extend-0.1.2.tgz", - "integrity": "sha512-72E9+uLc27Mt718pMHt9VMNiAL4LMsmDbBva8mxWUCkT07fSzEGMYUCk0XWY6lp0j6RBAG4cJ3mWuZv2OE3s0g==", - "dev": true, - "dependencies": { 
- "define-property": "^0.2.5", - "object-copy": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/static-extend/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha512-Rr7ADjQZenceVOAKop6ALkkRAmH1A4Gx9hV/7ZujPUN2rkATqFO0JZLZInbAjpZYoJ1gUx8MRMQVkYemcbMSTA==", - "dev": true, - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/static-extend/node_modules/is-accessor-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", - "integrity": "sha512-e1BM1qnDbMRG3ll2U9dSK0UMHuWOs3pY3AtcFsmvwPtKL3MML/Q86i+GilLfvqEs4GW+ExB91tQ3Ig9noDIZ+A==", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/static-extend/node_modules/is-accessor-descriptor/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/static-extend/node_modules/is-data-descriptor": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", - "integrity": "sha512-+w9D5ulSoBNlmw9OHn3U2v51SyoCd0he+bB3xMl62oijhrspxowjU+AIcDY0N3iEJbUEkB15IlMASQsxYigvXg==", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/static-extend/node_modules/is-data-descriptor/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": 
"sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/static-extend/node_modules/is-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.6.tgz", - "integrity": "sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==", - "dev": true, - "dependencies": { - "is-accessor-descriptor": "^0.1.6", - "is-data-descriptor": "^0.1.4", - "kind-of": "^5.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/stream-browserify": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-2.0.2.tgz", - "integrity": "sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg==", - "dev": true, - "dependencies": { - "inherits": "~2.0.1", - "readable-stream": "^2.0.2" - } - }, - "node_modules/stream-combiner2": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/stream-combiner2/-/stream-combiner2-1.1.1.tgz", - "integrity": "sha512-3PnJbYgS56AeWgtKF5jtJRT6uFJe56Z0Hc5Ngg/6sI6rIt8iiMBTa9cvdyFfpMQjaVHr8dusbNeFGIIonxOvKw==", - "dev": true, - "dependencies": { - "duplexer2": "~0.1.0", - "readable-stream": "^2.0.2" - } - }, - "node_modules/stream-exhaust": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/stream-exhaust/-/stream-exhaust-1.0.2.tgz", - "integrity": "sha512-b/qaq/GlBK5xaq1yrK9/zFcyRSTNxmcZwFLGSTG0mXgZl/4Z6GgiyYOXOvY7N3eEvFRAG1bkDRz5EPGSvPYQlw==", - "dev": true - }, - "node_modules/stream-http": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/stream-http/-/stream-http-3.2.0.tgz", - "integrity": "sha512-Oq1bLqisTyK3TSCXpPbT4sdeYNdmyZJv1LxpEm2vu1ZhK89kSE5YXwZc3cWk0MagGaKriBh9mCFbVGtO+vY29A==", - "dev": true, - "dependencies": { - "builtin-status-codes": "^3.0.0", - 
"inherits": "^2.0.4", - "readable-stream": "^3.6.0", - "xtend": "^4.0.2" - } - }, - "node_modules/stream-http/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dev": true, - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" } }, - "node_modules/stream-shift": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz", - "integrity": "sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==", + "node_modules/spdx-license-ids": { + "version": "3.0.13", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.13.tgz", + "integrity": "sha512-XkD+zwiqXHikFZm4AX/7JSCXA98U5Db4AFd5XUg/+9UNtnH75+Z9KxtpYiJZx36mUDVOwH83pl7yvCer6ewM3w==", "dev": true }, - "node_modules/stream-splicer": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/stream-splicer/-/stream-splicer-2.0.1.tgz", - "integrity": "sha512-Xizh4/NPuYSyAXyT7g8IvdJ9HJpxIGL9PjyhtywCZvvP0OPIdqyrr4dMikeuvY8xahpdKEBlBTySe583totajg==", + "node_modules/split-file": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/split-file/-/split-file-2.3.0.tgz", + "integrity": "sha512-dc/0SDKvjtSjUI999vkclWQAk5xhD86pKEWWL2ULR6WrHI9/euIEMG/JSUbwbNW8IC+gYLJqynSGHwlOVmSwGA==", "dev": true, + "license": "MIT", "dependencies": { - "inherits": "^2.0.1", - "readable-stream": "^2.0.2" + "bluebird": "^3.7.2" + }, + "bin": { + "split-file": "split-file-cli.js" + } + }, + "node_modules/split-on-first": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/split-on-first/-/split-on-first-1.1.0.tgz", + "integrity": 
"sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw==", + "license": "MIT", + "engines": { + "node": ">=6" } }, "node_modules/strict-uri-encode": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz", "integrity": "sha512-QwiXZgpRcKkhTj2Scnn++4PKtWsH0kpzZ62L2R6c/LUVYv7hVnZqcg2+sMuT6R7Jusu1vviK/MFsu6kNJfWlEQ==", + "license": "MIT", "engines": { "node": ">=4" } @@ -11397,6 +7141,7 @@ "version": "1.3.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "license": "MIT", "dependencies": { "safe-buffer": "~5.2.0" } @@ -11460,11 +7205,57 @@ "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, + "node_modules/string.prototype.trim": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.7.tgz", + "integrity": "sha512-p6TmeT1T3411M8Cgg9wBTMRtY2q9+PNy9EV1i2lIXUN/btt763oIfxwN3RR8VU6wHX8j/1CFy0L+YuThm6bgOg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimend": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz", + "integrity": "sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimstart": { + "version": "1.0.6", + "resolved": 
"https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz", + "integrity": "sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "dev": true, + "license": "MIT", "dependencies": { "ansi-regex": "^5.0.1" }, @@ -11473,24 +7264,12 @@ } }, "node_modules/strip-bom": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz", - "integrity": "sha512-kwrX1y7czp1E69n2ajbG65mIo9dqvJ+8aBQXOGVxqwvNbsXdFM6Lq37dLAY3mknUwru8CfcCbfOLL/gMo+fi3g==", - "dev": true, - "dependencies": { - "is-utf8": "^0.2.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/strip-bom-string": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz", - "integrity": "sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", "dev": true, "engines": { - "node": ">=0.10.0" + "node": ">=4" } }, "node_modules/strip-final-newline": { @@ -11505,11 +7284,24 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/strip-indent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": 
"sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", + "dev": true, + "dependencies": { + "min-indent": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/strip-json-comments": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" }, @@ -11520,16 +7312,8 @@ "node_modules/strnum": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz", - "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==" - }, - "node_modules/subarg": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/subarg/-/subarg-1.0.0.tgz", - "integrity": "sha512-RIrIdRY0X1xojthNcVtgT9sjpOGagEUKpZdgBUi054OEPFo282yg+zE+t1Rj3+RqKq2xStL7uUHhY+AjbC4BXg==", - "dev": true, - "dependencies": { - "minimist": "^1.1.0" - } + "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==", + "license": "MIT" }, "node_modules/superagent": { "version": "5.3.1", @@ -11559,6 +7343,7 @@ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "dev": true, + "license": "ISC", "dependencies": { "yallist": "^4.0.0" }, @@ -11566,25 +7351,12 @@ "node": ">=10" } }, - "node_modules/superagent/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dev": true, - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - 
}, - "engines": { - "node": ">= 6" - } - }, "node_modules/superagent/node_modules/semver": { "version": "7.3.8", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", "dev": true, + "license": "ISC", "dependencies": { "lru-cache": "^6.0.0" }, @@ -11599,13 +7371,15 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/supports-color": { "version": "8.1.1", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", "dev": true, + "license": "MIT", "dependencies": { "has-flag": "^4.0.0" }, @@ -11621,6 +7395,7 @@ "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -11628,308 +7403,79 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/sver-compat": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/sver-compat/-/sver-compat-1.5.0.tgz", - "integrity": "sha512-aFTHfmjwizMNlNE6dsGmoAM4lHjL0CyiobWaFiXWSlD7cIxshW422Nb8KbXCmR6z+0ZEPY+daXJrDyh/vuwTyg==", - "dev": true, - "dependencies": { - "es6-iterator": "^2.0.1", - "es6-symbol": "^3.1.1" - } - }, - "node_modules/syntax-error": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/syntax-error/-/syntax-error-1.4.0.tgz", - "integrity": "sha512-YPPlu67mdnHGTup2A8ff7BC2Pjq0e0Yp/IyTFN03zWO0RcK07uLcbi7C2KpGR2FvWbaB0+bfE27a+sBKebSo7w==", - "dev": true, - "dependencies": { - 
"acorn-node": "^1.2.0" - } - }, - "node_modules/table": { - "version": "5.4.6", - "resolved": "https://registry.npmjs.org/table/-/table-5.4.6.tgz", - "integrity": "sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug==", + "node_modules/synckit": { + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.8.5.tgz", + "integrity": "sha512-L1dapNV6vu2s/4Sputv8xGsCdAVlb5nRDMFU/E27D44l5U6cw1g0dGd45uLc+OXjNMmF4ntiMdCimzcjFKQI8Q==", "dev": true, "dependencies": { - "ajv": "^6.10.2", - "lodash": "^4.17.14", - "slice-ansi": "^2.1.0", - "string-width": "^3.0.0" + "@pkgr/utils": "^2.3.1", + "tslib": "^2.5.0" }, "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/table/node_modules/ansi-regex": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", - "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/table/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "dependencies": { - "color-convert": "^1.9.0" + "node": "^14.18.0 || >=16.0.0" }, - "engines": { - "node": ">=4" - } - }, - "node_modules/table/node_modules/astral-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-1.0.0.tgz", - "integrity": "sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/table/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": 
"sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "dependencies": { - "color-name": "1.1.3" + "funding": { + "url": "https://opencollective.com/unts" } }, - "node_modules/table/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "dev": true - }, - "node_modules/table/node_modules/emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "node_modules/synckit/node_modules/tslib": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", + "integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==", "dev": true }, - "node_modules/table/node_modules/is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/table/node_modules/slice-ansi": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz", - "integrity": "sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ==", - "dev": true, - "dependencies": { - "ansi-styles": "^3.2.0", - "astral-regex": "^1.0.0", - "is-fullwidth-code-point": "^2.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/table/node_modules/string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - 
"integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", - "dev": true, - "dependencies": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/table/node_modules/strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "node_modules/tapable": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", + "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", "dev": true, - "dependencies": { - "ansi-regex": "^4.1.0" - }, "engines": { "node": ">=6" } }, - "node_modules/ternary-stream": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ternary-stream/-/ternary-stream-3.0.0.tgz", - "integrity": "sha512-oIzdi+UL/JdktkT+7KU5tSIQjj8pbShj3OASuvDEhm0NT5lppsm7aXWAmAq4/QMaBIyfuEcNLbAQA+HpaISobQ==", - "dev": true, - "dependencies": { - "duplexify": "^4.1.1", - "fork-stream": "^0.0.4", - "merge-stream": "^2.0.0", - "through2": "^3.0.1" - } - }, "node_modules/text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/through": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/through2": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.2.tgz", "integrity": 
"sha512-enaDQ4MUyP2W6ZyT6EsMzqBPZaM/avg8iuo+l2d3QCs0J+6RaqkHV/2/lOwDTueBHeJ/2LG9lrLW3d5rWPucuQ==", + "license": "MIT", "dependencies": { "inherits": "^2.0.4", "readable-stream": "2 || 3" } }, - "node_modules/through2-filter": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/through2-filter/-/through2-filter-3.0.0.tgz", - "integrity": "sha512-jaRjI2WxN3W1V8/FMZ9HKIBXixtiqs3SQSX4/YGIiP3gL6djW48VoZq9tDqeCWs3MT8YY5wb/zli8VW8snY1CA==", - "dev": true, - "dependencies": { - "through2": "~2.0.0", - "xtend": "~4.0.0" - } - }, - "node_modules/through2-filter/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dev": true, - "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } - }, - "node_modules/through2/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/time-stamp": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/time-stamp/-/time-stamp-1.1.0.tgz", - "integrity": "sha512-gLCeArryy2yNTRzTGKbZbloctj64jkZ57hj5zdraXue6aFgd6PmvVtEyiUU+hvU0v7q08oVv8r8ev0tRo6bvgw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/timers-browserify": { - "version": "1.4.2", - "resolved": "https://registry.npmjs.org/timers-browserify/-/timers-browserify-1.4.2.tgz", - "integrity": "sha512-PIxwAupJZiYU4JmVZYwXp9FKsHMXb5h0ZEFyuXTAn8WLHOlcij+FEcbrvDsom1o5dr1YggEtFbECvGCW2sT53Q==", - "dev": true, - "dependencies": { - "process": "~0.11.0" - }, - "engines": { - "node": 
">=0.6.0" - } - }, - "node_modules/timers-ext": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/timers-ext/-/timers-ext-0.1.7.tgz", - "integrity": "sha512-b85NUNzTSdodShTIbky6ZF02e8STtVVfD+fu4aXXShEELpozH+bCpJLYMPZbsABN2wDH7fJpqIoXxJpzbf0NqQ==", - "dev": true, - "dependencies": { - "es5-ext": "~0.10.46", - "next-tick": "1" - } - }, - "node_modules/tmp": { - "version": "0.0.33", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", - "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", - "dev": true, - "dependencies": { - "os-tmpdir": "~1.0.2" - }, - "engines": { - "node": ">=0.6.0" - } - }, - "node_modules/to-absolute-glob": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/to-absolute-glob/-/to-absolute-glob-2.0.2.tgz", - "integrity": "sha512-rtwLUQEwT8ZeKQbyFJyomBRYXyE16U5VKuy0ftxLMK/PZb2fkOsg5r9kHdauuVDbsNdIBoC/HCthpidamQFXYA==", + "node_modules/tiny-glob": { + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/tiny-glob/-/tiny-glob-0.2.9.tgz", + "integrity": "sha512-g/55ssRPUjShh+xkfx9UPDXqhckHEsHr4Vd9zX55oSdGZc/MD0m3sferOkwWtp98bv+kcVfEHtRJgBVJzelrzg==", "dev": true, "dependencies": { - "is-absolute": "^1.0.0", - "is-negated-glob": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" + "globalyzer": "0.1.0", + "globrex": "^0.1.2" } }, "node_modules/to-fast-properties": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/to-object-path": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz", - "integrity": "sha512-9mWHdnGRuh3onocaHzukyvCZhzvr6tiflAy/JRFXcJX0TjgfWA9pk9t8CMbzmBE4Jfw58pXbkngtBtqYxzNEyg==", - "dev": true, - "dependencies": { - "kind-of": 
"^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/to-object-path/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/to-regex": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/to-regex/-/to-regex-3.0.2.tgz", - "integrity": "sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", "dev": true, - "dependencies": { - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "regex-not": "^1.0.2", - "safe-regex": "^1.1.0" - }, + "license": "MIT", "engines": { - "node": ">=0.10.0" + "node": ">=4" } }, "node_modules/to-regex-range": { @@ -11937,6 +7483,7 @@ "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", "dev": true, + "license": "MIT", "dependencies": { "is-number": "^7.0.0" }, @@ -11949,43 +7496,48 @@ "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.12.0" } }, - "node_modules/to-through": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-through/-/to-through-2.0.0.tgz", - "integrity": "sha512-+QIz37Ly7acM4EMdw2PRN389OneM5+d844tirkGp4dPKzI5OE72V9OsbFp+CIYJDahZ41ZV05hNtcPAQUAm9/Q==", + "node_modules/tsconfig-paths": { + 
"version": "3.14.2", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz", + "integrity": "sha512-o/9iXgCYc5L/JxCHPe3Hvh8Q/2xm5Z+p18PESBU6Ff33695QnCHBEjcytY2q19ua7Mbl/DavtBOLq+oG0RCL+g==", "dev": true, "dependencies": { - "through2": "^2.0.3" - }, - "engines": { - "node": ">= 0.10" + "@types/json5": "^0.0.29", + "json5": "^1.0.2", + "minimist": "^1.2.6", + "strip-bom": "^3.0.0" } }, - "node_modules/to-through/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "node_modules/tsconfig-paths/node_modules/json5": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", + "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", "dev": true, "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" + "minimist": "^1.2.0" + }, + "bin": { + "json5": "lib/cli.js" } }, "node_modules/tslib": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", - "dev": true + "dev": true, + "license": "0BSD" }, "node_modules/tsutils": { "version": "3.21.0", "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", "dev": true, + "license": "MIT", "dependencies": { "tslib": "^1.8.1" }, @@ -11996,23 +7548,12 @@ "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" } }, - "node_modules/tty-browserify": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.1.tgz", - 
"integrity": "sha512-C3TaO7K81YvjCgQH9Q1S3R3P3BtN3RIM8n+OvX4il1K1zgE8ZhI0op7kClgkxtutIE8hQrcrHBXvIheqKUUCxw==", - "dev": true - }, - "node_modules/type": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/type/-/type-1.2.0.tgz", - "integrity": "sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==", - "dev": true - }, "node_modules/type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", "dev": true, + "license": "MIT", "dependencies": { "prelude-ls": "^1.2.1" }, @@ -12025,6 +7566,7 @@ "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } @@ -12034,6 +7576,7 @@ "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", "dev": true, + "license": "(MIT OR CC0-1.0)", "engines": { "node": ">=10" }, @@ -12041,18 +7584,26 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/typedarray": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", - "integrity": "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==", - "dev": true + "node_modules/typed-array-length": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz", + "integrity": "sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "for-each": "^0.3.3", + "is-typed-array": "^1.1.9" + }, + "funding": { + 
"url": "https://github.com/sponsors/ljharb" + } }, "node_modules/typescript": { "version": "5.0.4", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.0.4.tgz", "integrity": "sha512-cW9T5W9xY37cc+jfEnaUvX91foxtHkza3Nw3wkoF4sSlKn0MONdkdEndig/qPBWXNkmplh3NzayQzCiHM4/hqw==", "dev": true, - "peer": true, + "license": "Apache-2.0", "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -12061,68 +7612,19 @@ "node": ">=12.20" } }, - "node_modules/umd": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/umd/-/umd-3.0.3.tgz", - "integrity": "sha512-4IcGSufhFshvLNcMCV80UnQVlZ5pMOC8mvNPForqwA4+lzYQuetTESLDQkeLmihq8bRcnpbQa48Wb8Lh16/xow==", - "dev": true, - "bin": { - "umd": "bin/cli.js" - } - }, - "node_modules/unc-path-regex": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/unc-path-regex/-/unc-path-regex-0.1.2.tgz", - "integrity": "sha512-eXL4nmJT7oCpkZsHZUOJo8hcX3GbsiDOa0Qu9F646fi8dT3XuSVopVqAcEiVzSKKH7UoDti23wNX3qGFxcW5Qg==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/undeclared-identifiers": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/undeclared-identifiers/-/undeclared-identifiers-1.1.3.tgz", - "integrity": "sha512-pJOW4nxjlmfwKApE4zvxLScM/njmwj/DiUBv7EabwE4O8kRUy+HIwxQtZLBPll/jx1LJyBcqNfB3/cpv9EZwOw==", - "dev": true, - "dependencies": { - "acorn-node": "^1.3.0", - "dash-ast": "^1.0.0", - "get-assigned-identifiers": "^1.2.0", - "simple-concat": "^1.0.0", - "xtend": "^4.0.1" - }, - "bin": { - "undeclared-identifiers": "bin.js" - } - }, - "node_modules/undertaker": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/undertaker/-/undertaker-1.3.0.tgz", - "integrity": "sha512-/RXwi5m/Mu3H6IHQGww3GNt1PNXlbeCuclF2QYR14L/2CHPz3DFZkvB5hZ0N/QUkiXWCACML2jXViIQEQc2MLg==", + "node_modules/unbox-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", + "integrity": 
"sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", "dev": true, "dependencies": { - "arr-flatten": "^1.0.1", - "arr-map": "^2.0.0", - "bach": "^1.0.0", - "collection-map": "^1.0.0", - "es6-weak-map": "^2.0.1", - "fast-levenshtein": "^1.0.0", - "last-run": "^1.1.0", - "object.defaults": "^1.0.0", - "object.reduce": "^1.0.0", - "undertaker-registry": "^1.0.0" + "call-bind": "^1.0.2", + "has-bigints": "^1.0.2", + "has-symbols": "^1.0.3", + "which-boxed-primitive": "^1.0.2" }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/undertaker-registry": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/undertaker-registry/-/undertaker-registry-1.0.1.tgz", - "integrity": "sha512-UR1khWeAjugW3548EfQmL9Z7pGMlBgXteQpr1IZeZBtnkCJQJIJ1Scj0mb9wQaPvUZ9Q17XqW6TIaPchJkyfqw==", - "dev": true, - "engines": { - "node": ">= 0.10" + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/unicode-canonical-property-names-ecmascript": { @@ -12130,6 +7632,7 @@ "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz", "integrity": "sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } @@ -12139,6 +7642,7 @@ "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz", "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==", "dev": true, + "license": "MIT", "dependencies": { "unicode-canonical-property-names-ecmascript": "^2.0.0", "unicode-property-aliases-ecmascript": "^2.0.0" @@ -12152,6 +7656,7 @@ "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.1.0.tgz", "integrity": 
"sha512-qxkjQt6qjg/mYscYMC0XKRn3Rh0wFPlfxB0xkt9CfyTvpX1Ra0+rAmdX2QyAobptSEvuy4RtpPRui6XkV+8wjA==", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } @@ -12161,102 +7666,11 @@ "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz", "integrity": "sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w==", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } }, - "node_modules/union-value": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz", - "integrity": "sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==", - "dev": true, - "dependencies": { - "arr-union": "^3.1.0", - "get-value": "^2.0.6", - "is-extendable": "^0.1.1", - "set-value": "^2.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/union-value/node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/unique-stream": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/unique-stream/-/unique-stream-2.3.1.tgz", - "integrity": "sha512-2nY4TnBE70yoxHkDli7DMazpWiP7xMdCYqU2nBRO0UB+ZpEkGsSija7MvmvnZFUeC+mrgiUfcHSr3LmRFIg4+A==", - "dev": true, - "dependencies": { - "json-stable-stringify-without-jsonify": "^1.0.1", - "through2-filter": "^3.0.0" - } - }, - "node_modules/unset-value": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unset-value/-/unset-value-1.0.0.tgz", - "integrity": "sha512-PcA2tsuGSF9cnySLHTLSh2qrQiJ70mn+r+Glzxv2TWZblxsxCC52BDlZoPCsz7STd9pN7EZetkWZBAvk4cgZdQ==", - "dev": true, - "dependencies": { - "has-value": "^0.3.1", - "isobject": "^3.0.0" - }, - "engines": { - 
"node": ">=0.10.0" - } - }, - "node_modules/unset-value/node_modules/has-value": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/has-value/-/has-value-0.3.1.tgz", - "integrity": "sha512-gpG936j8/MzaeID5Yif+577c17TxaDmhuyVgSwtnL/q8UUTySg8Mecb+8Cf1otgLoD7DDH75axp86ER7LFsf3Q==", - "dev": true, - "dependencies": { - "get-value": "^2.0.3", - "has-values": "^0.1.4", - "isobject": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/unset-value/node_modules/has-value/node_modules/isobject": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz", - "integrity": "sha512-+OUdGJlgjOBZDfxnDjYYG6zp487z0JGNQq3cYQYg5f5hKR+syHMsaztzGeml/4kGG55CSpKSpWTY+jYGgsHLgA==", - "dev": true, - "dependencies": { - "isarray": "1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/unset-value/node_modules/has-values": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/has-values/-/has-values-0.1.4.tgz", - "integrity": "sha512-J8S0cEdWuQbqD9//tlZxiMuMNmxB8PlEwvYwuxsTmR1G5RXUePEX/SJn7aD0GMLieuZYSwNH0cQuJGwnYunXRQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/upath": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/upath/-/upath-1.2.0.tgz", - "integrity": "sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg==", - "dev": true, - "engines": { - "node": ">=4", - "yarn": "*" - } - }, "node_modules/update-browserslist-db": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.10.tgz", @@ -12272,6 +7686,7 @@ "url": "https://tidelift.com/funding/github/npm/browserslist" } ], + "license": "MIT", "dependencies": { "escalade": "^3.1.1", "picocolors": "^1.0.0" @@ -12288,6 +7703,7 @@ "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", "integrity": 
"sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", "dev": true, + "license": "BSD-2-Clause", "dependencies": { "punycode": "^2.1.0" } @@ -12297,46 +7713,16 @@ "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" } }, - "node_modules/urix": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/urix/-/urix-0.1.0.tgz", - "integrity": "sha512-Am1ousAhSLBeB9cG/7k7r2R0zj50uDRlZHPGbazid5s9rlF1F/QKYObEKSIunSjIOkJZqwRRLpvewjEkM7pSqg==", - "deprecated": "Please see https://github.com/lydell/urix#deprecated", - "dev": true - }, - "node_modules/url": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/url/-/url-0.11.0.tgz", - "integrity": "sha512-kbailJa29QrtXnxgq+DdCEGlbTeYM2eJUxsz6vjZavrCYPMIFHMKQmSKYAIuUK2i7hgPm28a8piX5NTUtM/LKQ==", - "dev": true, - "dependencies": { - "punycode": "1.3.2", - "querystring": "0.2.0" - } - }, - "node_modules/url/node_modules/punycode": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", - "integrity": "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==", - "dev": true - }, - "node_modules/use": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/use/-/use-3.1.1.tgz", - "integrity": "sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/util": { "version": "0.12.5", "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", "integrity": "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==", + "license": "MIT", "dependencies": { "inherits": "^2.0.3", "is-arguments": "^1.0.4", @@ -12348,34 +7734,16 @@ 
"node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "license": "MIT" }, "node_modules/uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", - "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.0.tgz", + "integrity": "sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg==", "dev": true, "bin": { - "uuid": "bin/uuid" - } - }, - "node_modules/v8-compile-cache": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", - "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==", - "dev": true - }, - "node_modules/v8flags": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/v8flags/-/v8flags-3.2.0.tgz", - "integrity": "sha512-mH8etigqMfiGWdeXpaaqGfs6BndypxusHHcv2qSHyZkGEznCd/qAXCWWRzeowtL54147cktFOC4P5y+kl8d8Jg==", - "dev": true, - "dependencies": { - "homedir-polyfill": "^1.0.1" - }, - "engines": { - "node": ">= 0.10" + "uuid": "dist/bin/uuid" } }, "node_modules/validate-npm-package-license": { @@ -12388,128 +7756,11 @@ "spdx-expression-parse": "^3.0.0" } }, - "node_modules/value-or-function": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/value-or-function/-/value-or-function-3.0.0.tgz", - 
"integrity": "sha512-jdBB2FrWvQC/pnPtIqcLsMaQgjhdb6B7tk1MMyTKapox+tQZbdRP4uLxu/JY0t7fbfDCUMnuelzEYv5GsxHhdg==", - "dev": true, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/vinyl": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/vinyl/-/vinyl-2.2.1.tgz", - "integrity": "sha512-LII3bXRFBZLlezoG5FfZVcXflZgWP/4dCwKtxd5ky9+LOtM4CS3bIRQsmR1KMnMW07jpE8fqR2lcxPZ+8sJIcw==", - "dev": true, - "dependencies": { - "clone": "^2.1.1", - "clone-buffer": "^1.0.0", - "clone-stats": "^1.0.0", - "cloneable-readable": "^1.0.0", - "remove-trailing-separator": "^1.0.1", - "replace-ext": "^1.0.0" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/vinyl-fs": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/vinyl-fs/-/vinyl-fs-3.0.3.tgz", - "integrity": "sha512-vIu34EkyNyJxmP0jscNzWBSygh7VWhqun6RmqVfXePrOwi9lhvRs//dOaGOTRUQr4tx7/zd26Tk5WeSVZitgng==", - "dev": true, - "dependencies": { - "fs-mkdirp-stream": "^1.0.0", - "glob-stream": "^6.1.0", - "graceful-fs": "^4.0.0", - "is-valid-glob": "^1.0.0", - "lazystream": "^1.0.0", - "lead": "^1.0.0", - "object.assign": "^4.0.4", - "pumpify": "^1.3.5", - "readable-stream": "^2.3.3", - "remove-bom-buffer": "^3.0.0", - "remove-bom-stream": "^1.2.0", - "resolve-options": "^1.1.0", - "through2": "^2.0.0", - "to-through": "^2.0.0", - "value-or-function": "^3.0.0", - "vinyl": "^2.0.0", - "vinyl-sourcemap": "^1.1.0" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/vinyl-fs/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dev": true, - "dependencies": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - } - }, - "node_modules/vinyl-sourcemap": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/vinyl-sourcemap/-/vinyl-sourcemap-1.1.0.tgz", - "integrity": 
"sha512-NiibMgt6VJGJmyw7vtzhctDcfKch4e4n9TBeoWlirb7FMg9/1Ov9k+A5ZRAtywBpRPiyECvQRQllYM8dECegVA==", - "dev": true, - "dependencies": { - "append-buffer": "^1.0.2", - "convert-source-map": "^1.5.0", - "graceful-fs": "^4.1.6", - "normalize-path": "^2.1.1", - "now-and-later": "^2.0.0", - "remove-bom-buffer": "^3.0.0", - "vinyl": "^2.0.0" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/vinyl-sourcemap/node_modules/normalize-path": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", - "integrity": "sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w==", - "dev": true, - "dependencies": { - "remove-trailing-separator": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/vinyl-sourcemaps-apply": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/vinyl-sourcemaps-apply/-/vinyl-sourcemaps-apply-0.2.1.tgz", - "integrity": "sha512-+oDh3KYZBoZC8hfocrbrxbLUeaYtQK7J5WU5Br9VqWqmCll3tFJqKp97GC9GmMsVIL0qnx2DgEDVxdo5EZ5sSw==", - "dev": true, - "dependencies": { - "source-map": "^0.5.1" - } - }, - "node_modules/vinyl-sourcemaps-apply/node_modules/source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/vm-browserify": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/vm-browserify/-/vm-browserify-1.1.2.tgz", - "integrity": "sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==", - "dev": true - }, "node_modules/web-encoding": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/web-encoding/-/web-encoding-1.1.5.tgz", "integrity": 
"sha512-HYLeVCdJ0+lBYV2FvNZmv3HJ2Nt0QYXqZojk3d9FJOLkwnuhzM9tmamh8d7HPM8QqjKH8DeHkFTx+CFlWpZZDA==", + "license": "MIT", "dependencies": { "util": "^0.12.3" }, @@ -12522,6 +7773,7 @@ "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", "dev": true, + "license": "ISC", "dependencies": { "isexe": "^2.0.0" }, @@ -12532,16 +7784,27 @@ "node": ">= 8" } }, - "node_modules/which-module": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/which-module/-/which-module-1.0.0.tgz", - "integrity": "sha512-F6+WgncZi/mJDrammbTuHe1q0R5hOXv/mBaiNA2TCNT/LTHusX0V+CJnj9XT8ki5ln2UZyyddDgHfCzyrOH7MQ==", - "dev": true + "node_modules/which-boxed-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", + "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "dev": true, + "dependencies": { + "is-bigint": "^1.0.1", + "is-boolean-object": "^1.1.0", + "is-number-object": "^1.0.4", + "is-string": "^1.0.5", + "is-symbol": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/which-typed-array": { "version": "1.1.9", "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.9.tgz", "integrity": "sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==", + "license": "MIT", "dependencies": { "available-typed-arrays": "^1.0.5", "call-bind": "^1.0.2", @@ -12557,63 +7820,12 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/wide-align": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", - "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", - "dev": true, - "dependencies": { - "string-width": 
"^1.0.2 || 2" - } - }, - "node_modules/wide-align/node_modules/ansi-regex": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.1.tgz", - "integrity": "sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/wide-align/node_modules/is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/wide-align/node_modules/string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", - "dev": true, - "dependencies": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/wide-align/node_modules/strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow==", - "dev": true, - "dependencies": { - "ansi-regex": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/word-wrap": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } @@ -12622,13 +7834,15 @@ "version": "6.2.0", "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.0.tgz", "integrity": 
"sha512-Rsk5qQHJ9eowMH28Jwhe8HEbmdYDX4lwoMWshiCXugjtHqMD9ZbiqSDLxcsfdqsETPzVUtX5s1Z5kStiIM6l4A==", - "dev": true + "dev": true, + "license": "Apache-2.0" }, "node_modules/wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -12646,6 +7860,7 @@ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -12655,6 +7870,7 @@ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, + "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -12668,24 +7884,14 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "dev": true - }, - "node_modules/write": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/write/-/write-1.0.3.tgz", - "integrity": "sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig==", "dev": true, - "dependencies": { - "mkdirp": "^0.5.1" - }, - "engines": { - "node": ">=4" - } + "license": "ISC" }, "node_modules/xml": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/xml/-/xml-1.0.1.tgz", - "integrity": "sha512-huCv9IH9Tcf95zuYCsQraZtWnJvBtLVE0QHMOs8bWyZAFZNDcYjsPq1nEx8jKA9y+Beo9v+7OBPRisQTjinQMw==" + "integrity": 
"sha512-huCv9IH9Tcf95zuYCsQraZtWnJvBtLVE0QHMOs8bWyZAFZNDcYjsPq1nEx8jKA9y+Beo9v+7OBPRisQTjinQMw==", + "license": "MIT" }, "node_modules/xml2js": { "version": "0.5.0", @@ -12703,24 +7909,17 @@ "version": "11.0.1", "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "license": "MIT", "engines": { "node": ">=4.0" } }, - "node_modules/xtend": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", - "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", - "dev": true, - "engines": { - "node": ">=0.4" - } - }, "node_modules/y18n": { "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", "dev": true, + "license": "ISC", "engines": { "node": ">=10" } @@ -12729,12 +7928,13 @@ "version": "3.1.1", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/yaml": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.2.1.tgz", - "integrity": "sha512-e0WHiYql7+9wr4cWMx3TVQrNwejKaEe7/rHNmQmqRjazfOP5W8PB6Jpebb5o6fIapbz9o9+2ipcaTM2ZwDI6lw==", + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.2.2.tgz", + "integrity": "sha512-CBKFWExMn46Foo4cldiChEzn7S7SRV+wqiluAb6xmueD/fGyRHIhX8m14vVGgeFWjN540nKCNVj6P21eQjgTuA==", "dev": true, "engines": { "node": ">= 14" @@ -12745,6 +7945,7 @@ "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", "dev": true, + "license": 
"MIT", "dependencies": { "cliui": "^7.0.2", "escalade": "^3.1.1", @@ -12763,6 +7964,7 @@ "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", "dev": true, + "license": "ISC", "engines": { "node": ">=10" } @@ -12772,6 +7974,7 @@ "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", "dev": true, + "license": "MIT", "dependencies": { "camelcase": "^6.0.0", "decamelize": "^4.0.0", @@ -12787,6 +7990,7 @@ "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, @@ -12799,6 +8003,7 @@ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -12808,6 +8013,7 @@ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, + "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -12822,6 +8028,7 @@ "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", "dev": true, + "license": "ISC", "engines": { "node": ">=10" } @@ -12831,6 +8038,7 @@ "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", "integrity": 
"sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, diff --git a/package.json b/package.json index dc7e35dd..6368372e 100644 --- a/package.json +++ b/package.json @@ -3,19 +3,50 @@ "version": "7.1.1", "description": "S3 Compatible Cloud Storage client", "main": "./dist/main/minio.js", + "module": "./dist/esm/minio.mjs", + "types": "./types/minio.d.ts", "scripts": { - "compile": "gulp compile", - "test": "gulp test", - "lint": "gulp lint", - "lint-fix": "gulp lint --fix", - "prepublishOnly": "gulp test", - "functional": "gulp functionalTest", - "browserify": "gulp browserify", - "prepare": "husky install && npm run compile", + "prepare": "husky install", + "tsc": "tsc", + "type-check": "tsc --noEmit --emitDeclarationOnly false", + "build": "node build.mjs", + "test": "mocha", + "lint": "eslint --ext js,mjs,cjs,ts ./", + "lint-fix": "eslint --ext js,mjs,cjs,ts ./ --fix", + "prepublishOnly": "npm test", + "functional": "mocha tests/functional/functional-tests.js", "format": "prettier -w .", "format-check": "prettier --list-different .", "lint-staged": "lint-staged" }, + "exports": { + ".": { + "types": "./types/minio.d.ts", + "require": "./dist/main/minio.js", + "default": "./dist/esm/minio.mjs" + }, + "./dist/main/*": { + "types": "./dist/main/*", + "require": "./dist/main/*", + "default": null + }, + "./dist/esm/*": { + "types": "./dist/esm/*", + "import": "./dist/esm/*", + "default": null + }, + "./package.json": "./package.json" + }, + "files": [ + "package.json", + "./dist/", + "./src/", + "./types/", + "LICENSE", + "README.md", + "README_zh_CN.md", + "MAINTAINERS.md" + ], "prettier": { "printWidth": 120, "singleQuote": true, @@ -24,10 +55,10 @@ "semi": false }, "lint-staged": { - "*.{js,cjs,mjs,ts,cts,mts,json}": [ + "*.json": [ "prettier --write" ], - "src/**/*.{js,cjs,mjs,ts,cts,mts}": [ + "*.{js,cjs,mjs,ts}": [ "eslint --fix", "prettier --write" ] @@ 
-55,8 +86,6 @@ "block-stream2": "^2.0.0", "browser-or-node": "^1.3.0", "buffer-crc32": "^0.2.13", - "crypto-browserify": "^3.12.0", - "es6-error": "^4.1.1", "fast-xml-parser": "^4.1.3", "ipaddr.js": "^2.0.1", "json-stream": "^1.0.0", @@ -72,22 +101,30 @@ "devDependencies": { "@babel/core": "^7.12.10", "@babel/preset-env": "^7.12.10", + "@babel/preset-typescript": "^7.21.4", + "@babel/register": "^7.21.0", + "@nodelib/fs.walk": "^1.2.8", + "@types/async": "^3.2.18", + "@types/browser-or-node": "^1.3.0", "@types/lodash": "^4.14.192", + "@types/mime-types": "^2.1.1", "@types/node": "^18.15.11", + "@types/xml": "^1.0.8", + "@types/xml2js": "^0.4.11", "@typescript-eslint/eslint-plugin": "^5.57.1", "@typescript-eslint/parser": "^5.57.1", - "babelify": "^10.0.0", - "browserify": "^16.5.2", + "@upleveled/babel-plugin-remove-node-prefix": "^1.0.4", + "babel-plugin-replace-import-extension": "^1.1.3", + "babel-plugin-transform-replace-expressions": "^0.2.0", "chai": "^4.2.0", + "dotenv": "^16.0.3", "eslint": "^8.37.0", "eslint-config-prettier": "^8.8.0", + "eslint-import-resolver-typescript": "^3.5.5", + "eslint-plugin-import": "^2.27.5", "eslint-plugin-simple-import-sort": "^10.0.0", - "gulp": "^4.0.2", - "gulp-babel": "^8.0.0", - "gulp-eslint": "^6.0.0", - "gulp-if": "^3.0.0", - "gulp-mocha": "^8.0.0", - "gulp-sourcemaps": "^2.6.5", + "eslint-plugin-unicorn": "^46.0.0", + "eslint-plugin-unused-imports": "^2.0.0", "husky": "^8.0.3", "lint-staged": "^13.2.1", "mocha": "^9.2.0", @@ -97,7 +134,8 @@ "source-map-support": "^0.5.13", "split-file": "^2.2.2", "superagent": "^5.1.0", - "uuid": "^3.4.0" + "typescript": "^5.0.4", + "uuid": "^9.0.0" }, "keywords": [ "api", diff --git a/src/AssumeRoleProvider.js b/src/AssumeRoleProvider.js new file mode 100644 index 00000000..455d7f52 --- /dev/null +++ b/src/AssumeRoleProvider.js @@ -0,0 +1,218 @@ +import * as Http from 'node:http' +import * as Https from 'node:https' +import { URL, URLSearchParams } from 'node:url' + +import { 
CredentialProvider } from './CredentialProvider.js' +import { Credentials } from './Credentials.js' +import { makeDateLong, parseXml, toSha256 } from './helpers.js' +import { signV4ByServiceName } from './signing.js' + +class AssumeRoleProvider extends CredentialProvider { + constructor({ + stsEndpoint, + accessKey, + secretKey, + durationSeconds = 900, + sessionToken, + policy, + region = '', + roleArn, + roleSessionName, + externalId, + token, + webIdentityToken, + action = 'AssumeRole', + transportAgent = undefined, + }) { + super({}) + + this.stsEndpoint = stsEndpoint + this.accessKey = accessKey + this.secretKey = secretKey + this.durationSeconds = durationSeconds + this.policy = policy + this.region = region + this.roleArn = roleArn + this.roleSessionName = roleSessionName + this.externalId = externalId + this.token = token + this.webIdentityToken = webIdentityToken + this.action = action + this.sessionToken = sessionToken + // By default, nodejs uses a global agent if the 'agent' property + // is set to undefined. Otherwise, it's okay to assume the users + // know what they're doing if they specify a custom transport agent. + this.transportAgent = transportAgent + + /** + * Internal Tracking variables + */ + this.credentials = null + this.expirySeconds = null + this.accessExpiresAt = null + } + + getRequestConfig() { + const url = new URL(this.stsEndpoint) + const hostValue = url.hostname + const portValue = url.port + const isHttp = url.protocol.includes('http:') + const qryParams = new URLSearchParams() + qryParams.set('Action', this.action) + qryParams.set('Version', '2011-06-15') + + const defaultExpiry = 900 + let expirySeconds = parseInt(this.durationSeconds) + if (expirySeconds < defaultExpiry) { + expirySeconds = defaultExpiry + } + this.expirySeconds = expirySeconds // for calculating refresh of credentials. 
+ + qryParams.set('DurationSeconds', this.expirySeconds) + + if (this.policy) { + qryParams.set('Policy', this.policy) + } + if (this.roleArn) { + qryParams.set('RoleArn', this.roleArn) + } + + if (this.roleSessionName != null) { + qryParams.set('RoleSessionName', this.roleSessionName) + } + if (this.token != null) { + qryParams.set('Token', this.token) + } + + if (this.webIdentityToken) { + qryParams.set('WebIdentityToken', this.webIdentityToken) + } + + if (this.externalId) { + qryParams.set('ExternalId', this.externalId) + } + + const urlParams = qryParams.toString() + const contentSha256 = toSha256(urlParams) + + const date = new Date() + + /** + * Nodejs's Request Configuration. + */ + const requestOptions = { + hostname: hostValue, + port: portValue, + path: '/', + protocol: url.protocol, + method: 'POST', + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + 'content-length': urlParams.length, + host: hostValue, + 'x-amz-date': makeDateLong(date), + 'x-amz-content-sha256': contentSha256, + }, + agent: this.transportAgent, + } + + const authorization = signV4ByServiceName(requestOptions, this.accessKey, this.secretKey, this.region, date, 'sts') + requestOptions.headers.authorization = authorization + + return { + requestOptions, + requestData: urlParams, + isHttp: isHttp, + } + } + + async performRequest() { + const reqObj = this.getRequestConfig() + const requestOptions = reqObj.requestOptions + const requestData = reqObj.requestData + + const isHttp = reqObj.isHttp + const Transport = isHttp ? 
Http : Https + + const promise = new Promise((resolve, reject) => { + const requestObj = Transport.request(requestOptions, (resp) => { + let resChunks = [] + resp.on('data', (rChunk) => { + resChunks.push(rChunk) + }) + resp.on('end', () => { + let body = Buffer.concat(resChunks).toString() + const xmlobj = parseXml(body) + resolve(xmlobj) + }) + resp.on('error', (err) => { + reject(err) + }) + }) + requestObj.on('error', (e) => { + reject(e) + }) + requestObj.write(requestData) + requestObj.end() + }) + return promise + } + + parseCredentials(respObj = {}) { + if (respObj.ErrorResponse) { + throw new Error('Unable to obtain credentials:', respObj) + } + const { + AssumeRoleResponse: { + AssumeRoleResult: { + Credentials: { + AccessKeyId: accessKey, + SecretAccessKey: secretKey, + SessionToken: sessionToken, + Expiration: expiresAt, + } = {}, + } = {}, + } = {}, + } = respObj + + this.accessExpiresAt = expiresAt + + const newCreds = new Credentials({ + accessKey, + secretKey, + sessionToken, + }) + + this.setCredentials(newCreds) + return this.credentials + } + + async refreshCredentials() { + try { + const assumeRoleCredentials = await this.performRequest() + this.credentials = this.parseCredentials(assumeRoleCredentials) + } catch (err) { + this.credentials = null + } + return this.credentials + } + + async getCredentials() { + let credConfig + if (!this.credentials || (this.credentials && this.isAboutToExpire())) { + credConfig = await this.refreshCredentials() + } else { + credConfig = this.credentials + } + return credConfig + } + + isAboutToExpire() { + const expiresAt = new Date(this.accessExpiresAt) + const provisionalExpiry = new Date(Date.now() + 1000 * 10) // check before 10 seconds. 
+ const isAboutToExpire = provisionalExpiry > expiresAt + return isAboutToExpire + } +} + +export default AssumeRoleProvider diff --git a/src/CredentialProvider.js b/src/CredentialProvider.js new file mode 100644 index 00000000..3e234034 --- /dev/null +++ b/src/CredentialProvider.js @@ -0,0 +1,50 @@ +import { Credentials } from './Credentials.js' + +export class CredentialProvider { + constructor({ accessKey, secretKey, sessionToken }) { + this.credentials = new Credentials({ + accessKey, + secretKey, + sessionToken, + }) + } + + getCredentials() { + return this.credentials.get() + } + + setCredentials(credentials) { + if (credentials instanceof Credentials) { + this.credentials = credentials + } else { + throw new Error('Unable to set Credentials . it should be an instance of Credentials class') + } + } + + setAccessKey(accessKey) { + this.credentials.setAccessKey(accessKey) + } + + getAccessKey() { + return this.credentials.getAccessKey() + } + + setSecretKey(secretKey) { + this.credentials.setSecretKey(secretKey) + } + + getSecretKey() { + return this.credentials.getSecretKey() + } + + setSessionToken(sessionToken) { + this.credentials.setSessionToken(sessionToken) + } + + getSessionToken() { + return this.credentials.getSessionToken() + } +} + +// deprecated, keep for backward compatibility. 
+export default CredentialProvider diff --git a/src/Credentials.js b/src/Credentials.js new file mode 100644 index 00000000..4babcfa2 --- /dev/null +++ b/src/Credentials.js @@ -0,0 +1,37 @@ +export class Credentials { + constructor({ accessKey, secretKey, sessionToken }) { + this.accessKey = accessKey + this.secretKey = secretKey + this.sessionToken = sessionToken + } + + setAccessKey(accessKey) { + this.accessKey = accessKey + } + getAccessKey() { + return this.accessKey + } + setSecretKey(secretKey) { + this.secretKey = secretKey + } + getSecretKey() { + return this.secretKey + } + setSessionToken(sessionToken) { + this.sessionToken = sessionToken + } + getSessionToken() { + return this.sessionToken + } + + get() { + return { + accessKey: this.accessKey, + secretKey: this.secretKey, + sessionToken: this.sessionToken, + } + } +} + +// deprecated, keep for backward compatibility. +export default Credentials diff --git a/src/base-error.ts b/src/base-error.ts new file mode 100644 index 00000000..d3947b6d --- /dev/null +++ b/src/base-error.ts @@ -0,0 +1,30 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/// + +/** + * @internal + */ +export class ExtendableError extends Error { + constructor(message?: string, opt?: ErrorOptions) { + // error Option {cause?: unknown} is a 'nice to have', + // don't use it internally + super(message, opt) + // set error name, otherwise it's always 'Error' + this.name = this.constructor.name + } +} diff --git a/src/errors.ts b/src/errors.ts new file mode 100644 index 00000000..fa6f62fb --- /dev/null +++ b/src/errors.ts @@ -0,0 +1,107 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { ExtendableError } from './base-error.ts' + +/** + * AnonymousRequestError is generated for anonymous keys on specific + * APIs. NOTE: PresignedURL generation always requires access keys. + */ +export class AnonymousRequestError extends ExtendableError {} + +/** + * InvalidArgumentError is generated for all invalid arguments. + */ +export class InvalidArgumentError extends ExtendableError {} + +/** + * InvalidPortError is generated when a non integer value is provided + * for ports. + */ +export class InvalidPortError extends ExtendableError {} + +/** + * InvalidEndpointError is generated when an invalid end point value is + * provided which does not follow domain standards. 
+ */ +export class InvalidEndpointError extends ExtendableError {} + +/** + * InvalidBucketNameError is generated when an invalid bucket name is + * provided which does not follow AWS S3 specifications. + * http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html + */ +export class InvalidBucketNameError extends ExtendableError {} + +/** + * InvalidObjectNameError is generated when an invalid object name is + * provided which does not follow AWS S3 specifications. + * http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html + */ +export class InvalidObjectNameError extends ExtendableError {} + +/** + * AccessKeyRequiredError generated by signature methods when access + * key is not found. + */ +export class AccessKeyRequiredError extends ExtendableError {} + +/** + * SecretKeyRequiredError generated by signature methods when secret + * key is not found. + */ +export class SecretKeyRequiredError extends ExtendableError {} + +/** + * ExpiresParamError generated when expires parameter value is not + * well within stipulated limits. + */ +export class ExpiresParamError extends ExtendableError {} + +/** + * InvalidDateError generated when invalid date is found. + */ +export class InvalidDateError extends ExtendableError {} + +/** + * InvalidPrefixError generated when object prefix provided is invalid + * or does not conform to AWS S3 object key restrictions. + */ +export class InvalidPrefixError extends ExtendableError {} + +/** + * InvalidBucketPolicyError generated when the given bucket policy is invalid. + */ +export class InvalidBucketPolicyError extends ExtendableError {} + +/** + * IncorrectSizeError generated when total data read mismatches with + * the input size. + */ +export class IncorrectSizeError extends ExtendableError {} + +/** + * InvalidXMLError generated when an unknown XML is found. + */ +export class InvalidXMLError extends ExtendableError {} + +/** + * S3Error is generated for errors returned from S3 server. 
+ * see getErrorTransformer for details + */ +export class S3Error extends ExtendableError {} + +export class IsValidBucketNameError extends ExtendableError {} diff --git a/src/extensions.js b/src/extensions.js new file mode 100644 index 00000000..f0e98957 --- /dev/null +++ b/src/extensions.js @@ -0,0 +1,175 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2020 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import * as Stream from 'node:stream' + +import * as errors from './errors.ts' +import { isBoolean, isNumber, isString, isValidBucketName, isValidPrefix, pipesetup, uriEscape } from './helpers.js' +import * as transformers from './transformers.js' + +export class extensions { + constructor(client) { + this.client = client + } + + // List the objects in the bucket using S3 ListObjects V2 With Metadata + // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `prefix` _string_: the prefix of the objects that should be listed (optional, default `''`) + // * `recursive` _bool_: `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. (optional, default `false`) + // * `startAfter` _string_: Specifies the key to start after when listing objects in a bucket. 
(optional, default `''`) + // + // __Return Value__ + // * `stream` _Stream_: stream emitting the objects in the bucket, the object is of the format: + // * `obj.name` _string_: name of the object + // * `obj.prefix` _string_: name of the object prefix + // * `obj.size` _number_: size of the object + // * `obj.etag` _string_: etag of the object + // * `obj.lastModified` _Date_: modified time stamp + // * `obj.metadata` _object_: metadata of the object + + listObjectsV2WithMetadata(bucketName, prefix, recursive, startAfter) { + if (prefix === undefined) { + prefix = '' + } + if (recursive === undefined) { + recursive = false + } + if (startAfter === undefined) { + startAfter = '' + } + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidPrefix(prefix)) { + throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) + } + if (!isString(prefix)) { + throw new TypeError('prefix should be of type "string"') + } + if (!isBoolean(recursive)) { + throw new TypeError('recursive should be of type "boolean"') + } + if (!isString(startAfter)) { + throw new TypeError('startAfter should be of type "string"') + } + // if recursive is false set delimiter to '/' + var delimiter = recursive ? 
'' : '/' + var continuationToken = '' + var objects = [] + var ended = false + var readStream = Stream.Readable({ objectMode: true }) + readStream._read = () => { + // push one object per _read() + if (objects.length) { + readStream.push(objects.shift()) + return + } + if (ended) { + return readStream.push(null) + } + // if there are no objects to push do query for the next batch of objects + this.listObjectsV2WithMetadataQuery(bucketName, prefix, continuationToken, delimiter, 1000, startAfter) + .on('error', (e) => readStream.emit('error', e)) + .on('data', (result) => { + if (result.isTruncated) { + continuationToken = result.nextContinuationToken + } else { + ended = true + } + objects = result.objects + readStream._read() + }) + } + return readStream + } + + // listObjectsV2WithMetadataQuery - (List Objects V2 with metadata) - List some or all (up to 1000) of the objects in a bucket. + // + // You can use the request parameters as selection criteria to return a subset of the objects in a bucket. + // request parameters :- + // * `bucketName` _string_: name of the bucket + // * `prefix` _string_: Limits the response to keys that begin with the specified prefix. + // * `continuation-token` _string_: Used to continue iterating over a set of objects. + // * `delimiter` _string_: A delimiter is a character you use to group keys. + // * `max-keys` _number_: Sets the maximum number of keys returned in the response body. + // * `start-after` _string_: Specifies the key to start after when listing objects in a bucket. 
+ + listObjectsV2WithMetadataQuery(bucketName, prefix, continuationToken, delimiter, maxKeys, startAfter) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isString(prefix)) { + throw new TypeError('prefix should be of type "string"') + } + if (!isString(continuationToken)) { + throw new TypeError('continuationToken should be of type "string"') + } + if (!isString(delimiter)) { + throw new TypeError('delimiter should be of type "string"') + } + if (!isNumber(maxKeys)) { + throw new TypeError('maxKeys should be of type "number"') + } + if (!isString(startAfter)) { + throw new TypeError('startAfter should be of type "string"') + } + var queries = [] + + // Call for listing objects v2 API + queries.push(`list-type=2`) + queries.push(`encoding-type=url`) + // escape every value in query string, except maxKeys + queries.push(`prefix=${uriEscape(prefix)}`) + queries.push(`delimiter=${uriEscape(delimiter)}`) + queries.push(`metadata=true`) + + if (continuationToken) { + continuationToken = uriEscape(continuationToken) + queries.push(`continuation-token=${continuationToken}`) + } + // Set start-after + if (startAfter) { + startAfter = uriEscape(startAfter) + queries.push(`start-after=${startAfter}`) + } + // no need to escape maxKeys + if (maxKeys) { + if (maxKeys >= 1000) { + maxKeys = 1000 + } + queries.push(`max-keys=${maxKeys}`) + } + queries.sort() + var query = '' + if (queries.length > 0) { + query = `${queries.join('&')}` + } + var method = 'GET' + var transformer = transformers.getListObjectsV2WithMetadataTransformer() + this.client.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { + if (e) { + return transformer.emit('error', e) + } + pipesetup(response, transformer) + }) + return transformer + } +} + +// deprecated, keep for backward compatibility. 
+export default extensions diff --git a/src/helpers.js b/src/helpers.js new file mode 100644 index 00000000..18091089 --- /dev/null +++ b/src/helpers.js @@ -0,0 +1,822 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import * as Crypto from 'node:crypto' +import * as fs from 'node:fs' +import * as path from 'node:path' +import * as stream from 'node:stream' + +import { isBrowser } from 'browser-or-node' +import { XMLParser } from 'fast-xml-parser' +import ipaddr from 'ipaddr.js' +import _ from 'lodash' +import mime from 'mime-types' +import querystring from 'query-string' + +import * as errors from './errors.ts' + +const fxp = new XMLParser() + +// Returns a wrapper function that will promisify a given callback function. +// It will preserve 'this'. +export function promisify(fn) { + return function () { + // If the last argument is a function, assume its the callback. + let callback = arguments[arguments.length - 1] + + // If the callback is given, don't promisify, just pass straight in. + if (typeof callback === 'function') { + return fn.apply(this, arguments) + } + + // Otherwise, create a new set of arguments, and wrap + // it in a promise. + let args = [...arguments] + + return new Promise((resolve, reject) => { + // Add the callback function. 
+ args.push((err, value) => { + if (err) { + return reject(err) + } + + resolve(value) + }) + + // Call the function with our special adaptor callback added. + fn.apply(this, args) + }) + } +} + +// All characters in string which are NOT unreserved should be percent encoded. +// Unreserved characers are : ALPHA / DIGIT / "-" / "." / "_" / "~" +// Reference https://tools.ietf.org/html/rfc3986#section-2.2 +export function uriEscape(string) { + return string.split('').reduce((acc, elem) => { + let buf = Buffer.from(elem) + if (buf.length === 1) { + // length 1 indicates that elem is not a unicode character. + // Check if it is an unreserved characer. + if ( + ('A' <= elem && elem <= 'Z') || + ('a' <= elem && elem <= 'z') || + ('0' <= elem && elem <= '9') || + elem === '_' || + elem === '.' || + elem === '~' || + elem === '-' + ) { + // Unreserved characer should not be encoded. + acc = acc + elem + return acc + } + } + // elem needs encoding - i.e elem should be encoded if it's not unreserved + // character or if it's a unicode character. + for (var i = 0; i < buf.length; i++) { + acc = acc + '%' + buf[i].toString(16).toUpperCase() + } + return acc + }, '') +} + +export function uriResourceEscape(string) { + return uriEscape(string).replace(/%2F/g, '/') +} + +export function getScope(region, date, serviceName = 's3') { + return `${makeDateShort(date)}/${region}/${serviceName}/aws4_request` +} + +// isAmazonEndpoint - true if endpoint is 's3.amazonaws.com' or 's3.cn-north-1.amazonaws.com.cn' +export function isAmazonEndpoint(endpoint) { + return endpoint === 's3.amazonaws.com' || endpoint === 's3.cn-north-1.amazonaws.com.cn' +} + +// isVirtualHostStyle - verify if bucket name is support with virtual +// hosts. bucketNames with periods should be always treated as path +// style if the protocol is 'https:', this is due to SSL wildcard +// limitation. For all other buckets and Amazon S3 endpoint we will +// default to virtual host style. 
+export function isVirtualHostStyle(endpoint, protocol, bucket, pathStyle) { + if (protocol === 'https:' && bucket.indexOf('.') > -1) { + return false + } + return isAmazonEndpoint(endpoint) || !pathStyle +} + +export function isValidIP(ip) { + return ipaddr.isValid(ip) +} + +// isValidEndpoint - true if endpoint is valid domain. +export function isValidEndpoint(endpoint) { + return isValidDomain(endpoint) || isValidIP(endpoint) +} + +// isValidDomain - true if input host is a valid domain. +export function isValidDomain(host) { + if (!isString(host)) { + return false + } + // See RFC 1035, RFC 3696. + if (host.length === 0 || host.length > 255) { + return false + } + // Host cannot start or end with a '-' + if (host[0] === '-' || host.slice(-1) === '-') { + return false + } + // Host cannot start or end with a '_' + if (host[0] === '_' || host.slice(-1) === '_') { + return false + } + // Host cannot start with a '.' + if (host[0] === '.') { + return false + } + var alphaNumerics = '`~!@#$%^&*()+={}[]|\\"\';:> -1) { + return false + } + } + // No need to regexp match, since the list is non-exhaustive. + // We let it be valid and fail later. + return true +} + +// Probes contentType using file extensions. +// For example: probeContentType('file.png') returns 'image/png'. +export function probeContentType(path) { + let contentType = mime.lookup(path) + if (!contentType) { + contentType = 'application/octet-stream' + } + return contentType +} + +// isValidPort - is input port valid. +export function isValidPort(port) { + // verify if port is a number. + if (!isNumber(port)) { + return false + } + // port cannot be negative. + if (port < 0) { + return false + } + // port '0' is valid and special case return true. + if (port === 0) { + return true + } + var min_port = 1 + var max_port = 65535 + // Verify if port is in range. 
+ return port >= min_port && port <= max_port +} + +export function isValidBucketName(bucket) { + if (!isString(bucket)) { + return false + } + + // bucket length should be less than and no more than 63 + // characters long. + if (bucket.length < 3 || bucket.length > 63) { + return false + } + // bucket with successive periods is invalid. + if (bucket.indexOf('..') > -1) { + return false + } + // bucket cannot have ip address style. + if (bucket.match(/[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+/)) { + return false + } + // bucket should begin with alphabet/number and end with alphabet/number, + // with alphabet/number/.- in the middle. + if (bucket.match(/^[a-z0-9][a-z0-9.-]+[a-z0-9]$/)) { + return true + } + return false +} + +// check if objectName is a valid object name +export function isValidObjectName(objectName) { + if (!isValidPrefix(objectName)) { + return false + } + if (objectName.length === 0) { + return false + } + return true +} + +// check if prefix is valid +export function isValidPrefix(prefix) { + if (!isString(prefix)) { + return false + } + if (prefix.length > 1024) { + return false + } + return true +} + +// check if typeof arg number +export function isNumber(arg) { + return typeof arg === 'number' +} + +// check if typeof arg function +export function isFunction(arg) { + return typeof arg === 'function' +} + +// check if typeof arg string +export function isString(arg) { + return typeof arg === 'string' +} + +// check if typeof arg object +export function isObject(arg) { + return typeof arg === 'object' && arg !== null +} + +// check if object is readable stream +export function isReadableStream(arg) { + return isObject(arg) && isFunction(arg._read) +} + +// check if arg is boolean +export function isBoolean(arg) { + return typeof arg === 'boolean' +} + +// check if arg is array +export function isArray(arg) { + return Array.isArray(arg) +} + +// check if arg is a valid date +export function isValidDate(arg) { + return arg instanceof Date && !isNaN(arg) 
+} + +// Create a Date string with format: +// 'YYYYMMDDTHHmmss' + Z +export function makeDateLong(date) { + date = date || new Date() + + // Gives format like: '2017-08-07T16:28:59.889Z' + date = date.toISOString() + + return date.slice(0, 4) + date.slice(5, 7) + date.slice(8, 13) + date.slice(14, 16) + date.slice(17, 19) + 'Z' +} + +// Create a Date string with format: +// 'YYYYMMDD' +export function makeDateShort(date) { + date = date || new Date() + + // Gives format like: '2017-08-07T16:28:59.889Z' + date = date.toISOString() + + return date.slice(0, 4) + date.slice(5, 7) + date.slice(8, 10) +} + +// pipesetup sets up pipe() from left to right os streams array +// pipesetup will also make sure that error emitted at any of the upstream Stream +// will be emitted at the last stream. This makes error handling simple +export function pipesetup(...streams) { + return streams.reduce((src, dst) => { + src.on('error', (err) => dst.emit('error', err)) + return src.pipe(dst) + }) +} + +// return a Readable stream that emits data +export function readableStream(data) { + var s = new stream.Readable() + s._read = () => {} + s.push(data) + s.push(null) + return s +} + +// Process metadata to insert appropriate value to `content-type` attribute +export function insertContentType(metaData, filePath) { + // check if content-type attribute present in metaData + for (var key in metaData) { + if (key.toLowerCase() === 'content-type') { + return metaData + } + } + // if `content-type` attribute is not present in metadata, + // then infer it from the extension in filePath + var newMetadata = Object.assign({}, metaData) + newMetadata['content-type'] = probeContentType(filePath) + return newMetadata +} + +// Function prepends metadata with the appropriate prefix if it is not already on +export function prependXAMZMeta(metaData) { + var newMetadata = Object.assign({}, metaData) + for (var key in metaData) { + if (!isAmzHeader(key) && !isSupportedHeader(key) && 
!isStorageclassHeader(key)) { + newMetadata['X-Amz-Meta-' + key] = newMetadata[key] + delete newMetadata[key] + } + } + return newMetadata +} + +// Checks if it is a valid header according to the AmazonS3 API +export function isAmzHeader(key) { + var temp = key.toLowerCase() + return ( + temp.startsWith('x-amz-meta-') || + temp === 'x-amz-acl' || + temp.startsWith('x-amz-server-side-encryption-') || + temp === 'x-amz-server-side-encryption' + ) +} +// Checks if it is a supported Header +export function isSupportedHeader(key) { + var supported_headers = [ + 'content-type', + 'cache-control', + 'content-encoding', + 'content-disposition', + 'content-language', + 'x-amz-website-redirect-location', + ] + return supported_headers.indexOf(key.toLowerCase()) > -1 +} +// Checks if it is a storage header +export function isStorageclassHeader(key) { + return key.toLowerCase() === 'x-amz-storage-class' +} + +export function extractMetadata(metaData) { + var newMetadata = {} + for (var key in metaData) { + if (isSupportedHeader(key) || isStorageclassHeader(key) || isAmzHeader(key)) { + if (key.toLowerCase().startsWith('x-amz-meta-')) { + newMetadata[key.slice(11, key.length)] = metaData[key] + } else { + newMetadata[key] = metaData[key] + } + } + } + return newMetadata +} + +export function getVersionId(headers = {}) { + const versionIdValue = headers['x-amz-version-id'] + return versionIdValue || null +} + +export function getSourceVersionId(headers = {}) { + const sourceVersionId = headers['x-amz-copy-source-version-id'] + return sourceVersionId || null +} + +export function sanitizeETag(etag = '') { + var replaceChars = { '"': '', '"': '', '"': '', '"': '', '"': '' } + return etag.replace(/^("|"|")|("|"|")$/g, (m) => replaceChars[m]) +} + +export const RETENTION_MODES = { + GOVERNANCE: 'GOVERNANCE', + COMPLIANCE: 'COMPLIANCE', +} + +export const RETENTION_VALIDITY_UNITS = { + DAYS: 'Days', + YEARS: 'Years', +} + +export const LEGAL_HOLD_STATUS = { + ENABLED: 'ON', + 
DISABLED: 'OFF', +} + +const objectToBuffer = (payload) => { + const payloadBuf = Buffer.from(Buffer.from(payload)) + return payloadBuf +} + +export const toMd5 = (payload) => { + let payLoadBuf = objectToBuffer(payload) + // use string from browser and buffer from nodejs + // browser support is tested only against minio server + payLoadBuf = isBrowser ? payLoadBuf.toString() : payLoadBuf + return Crypto.createHash('md5').update(payLoadBuf).digest().toString('base64') +} + +export const toSha256 = (payload) => { + return Crypto.createHash('sha256').update(payload).digest('hex') +} + +// toArray returns a single element array with param being the element, +// if param is just a string, and returns 'param' back if it is an array +// So, it makes sure param is always an array +export const toArray = (param) => { + if (!Array.isArray(param)) { + return [param] + } + return param +} + +export const sanitizeObjectKey = (objectName) => { + // + symbol characters are not decoded as spaces in JS. so replace them first and decode to get the correct result. + let asStrName = (objectName ? objectName.toString() : '').replace(/\+/g, ' ') + const sanitizedName = decodeURIComponent(asStrName) + return sanitizedName +} + +export const PART_CONSTRAINTS = { + // absMinPartSize - absolute minimum part size (5 MiB) + ABS_MIN_PART_SIZE: 1024 * 1024 * 5, + // MIN_PART_SIZE - minimum part size 16MiB per object after which + MIN_PART_SIZE: 1024 * 1024 * 16, + // MAX_PARTS_COUNT - maximum number of parts for a single multipart session. + MAX_PARTS_COUNT: 10000, + // MAX_PART_SIZE - maximum part size 5GiB for a single multipart upload + // operation. + MAX_PART_SIZE: 1024 * 1024 * 1024 * 5, + // MAX_SINGLE_PUT_OBJECT_SIZE - maximum size 5GiB of object per PUT + // operation. + MAX_SINGLE_PUT_OBJECT_SIZE: 1024 * 1024 * 1024 * 5, + // MAX_MULTIPART_PUT_OBJECT_SIZE - maximum size 5TiB of object for + // Multipart operation. 
+ MAX_MULTIPART_PUT_OBJECT_SIZE: 1024 * 1024 * 1024 * 1024 * 5, +} + +export const ENCRYPTION_TYPES = { + // SSEC represents server-side-encryption with customer provided keys + SSEC: 'SSE-C', + // KMS represents server-side-encryption with managed keys + KMS: 'KMS', +} +const GENERIC_SSE_HEADER = 'X-Amz-Server-Side-Encryption' + +const ENCRYPTION_HEADERS = { + // sseGenericHeader is the AWS SSE header used for SSE-S3 and SSE-KMS. + sseGenericHeader: GENERIC_SSE_HEADER, + // sseKmsKeyID is the AWS SSE-KMS key id. + sseKmsKeyID: GENERIC_SSE_HEADER + '-Aws-Kms-Key-Id', +} + +/** + * Return Encryption headers + * @param encConfig + * @returns an object with key value pairs that can be used in headers. + */ +function getEncryptionHeaders(encConfig) { + const encType = encConfig.type + const encHeaders = {} + if (!_.isEmpty(encType)) { + if (encType === ENCRYPTION_TYPES.SSEC) { + return { + [encHeaders[ENCRYPTION_HEADERS.sseGenericHeader]]: 'AES256', + } + } else if (encType === ENCRYPTION_TYPES.KMS) { + return { + [ENCRYPTION_HEADERS.sseGenericHeader]: encConfig.SSEAlgorithm, + [ENCRYPTION_HEADERS.sseKmsKeyID]: encConfig.KMSMasterKeyID, + } + } + } + + return encHeaders +} + +export class CopySourceOptions { + /** + * + * @param Bucket __string__ Bucket Name + * @param Object __string__ Object Name + * @param VersionID __string__ Valid versionId + * @param MatchETag __string__ Etag to match + * @param NoMatchETag __string__ Etag to exclude + * @param MatchModifiedSince __string__ Modified Date of the object/part. 
UTC Date in string format + * @param MatchUnmodifiedSince __string__ Modified Date of the object/part to exclude UTC Date in string format + * @param MatchRange __boolean__ true or false Object range to match + * @param Start + * @param End + * @param Encryption + */ + constructor({ + Bucket = '', + Object = '', + VersionID = '', + MatchETag = '', + NoMatchETag = '', + MatchModifiedSince = null, + MatchUnmodifiedSince = null, + MatchRange = false, + Start = 0, + End = 0, + Encryption = {}, + } = {}) { + this.Bucket = Bucket + this.Object = Object + this.VersionID = VersionID + this.MatchETag = MatchETag + this.NoMatchETag = NoMatchETag + this.MatchModifiedSince = MatchModifiedSince + this.MatchUnmodifiedSince = MatchUnmodifiedSince + this.MatchRange = MatchRange + this.Start = Start + this.End = End + this.Encryption = Encryption + } + + validate() { + if (!isValidBucketName(this.Bucket)) { + throw new errors.InvalidBucketNameError('Invalid Source bucket name: ' + this.Bucket) + } + if (!isValidObjectName(this.Object)) { + throw new errors.InvalidObjectNameError(`Invalid Source object name: ${this.Object}`) + } + if ((this.MatchRange && this.Start !== -1 && this.End !== -1 && this.Start > this.End) || this.Start < 0) { + throw new errors.InvalidObjectNameError('Source start must be non-negative, and start must be at most end.') + } else if ((this.MatchRange && !isNumber(this.Start)) || !isNumber(this.End)) { + throw new errors.InvalidObjectNameError( + 'MatchRange is specified. But Invalid Start and End values are specified. 
', + ) + } + + return true + } + + getHeaders() { + let headerOptions = {} + headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) + + if (!_.isEmpty(this.VersionID)) { + headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) + '?versionId=' + this.VersionID + } + + if (!_.isEmpty(this.MatchETag)) { + headerOptions['x-amz-copy-source-if-match'] = this.MatchETag + } + if (!_.isEmpty(this.NoMatchETag)) { + headerOptions['x-amz-copy-source-if-none-match'] = this.NoMatchETag + } + + if (!_.isEmpty(this.MatchModifiedSince)) { + headerOptions['x-amz-copy-source-if-modified-since'] = this.MatchModifiedSince + } + if (!_.isEmpty(this.MatchUnmodifiedSince)) { + headerOptions['x-amz-copy-source-if-unmodified-since'] = this.MatchUnmodifiedSince + } + + return headerOptions + } +} + +export class CopyDestinationOptions { + /* + * @param Bucket __string__ + * @param Object __string__ Object Name for the destination (composed/copied) object defaults + * @param Encryption __object__ Encryption configuration defaults to {} + * @param UserMetadata __object__ + * @param UserTags __object__ | __string__ + * @param LegalHold __string__ ON | OFF + * @param RetainUntilDate __string__ UTC Date String + * @param Mode + */ + constructor({ + Bucket = '', + Object = '', + Encryption = null, + UserMetadata = null, + UserTags = null, + LegalHold = null, + RetainUntilDate = null, + Mode = null, // + }) { + this.Bucket = Bucket + this.Object = Object + this.Encryption = Encryption + this.UserMetadata = UserMetadata + this.UserTags = UserTags + this.LegalHold = LegalHold + this.Mode = Mode // retention mode + this.RetainUntilDate = RetainUntilDate + } + + getHeaders() { + const replaceDirective = 'REPLACE' + const headerOptions = {} + + const userTags = this.UserTags + if (!_.isEmpty(userTags)) { + headerOptions['X-Amz-Tagging-Directive'] = replaceDirective + headerOptions['X-Amz-Tagging'] = isObject(userTags) + ? 
querystring.stringify(userTags) + : isString(userTags) + ? userTags + : '' + } + + if (!_.isEmpty(this.Mode)) { + headerOptions['X-Amz-Object-Lock-Mode'] = this.Mode // GOVERNANCE or COMPLIANCE + } + + if (!_.isEmpty(this.RetainUntilDate)) { + headerOptions['X-Amz-Object-Lock-Retain-Until-Date'] = this.RetainUntilDate // needs to be UTC. + } + + if (!_.isEmpty(this.LegalHold)) { + headerOptions['X-Amz-Object-Lock-Legal-Hold'] = this.LegalHold // ON or OFF + } + + if (!_.isEmpty(this.UserMetadata)) { + const headerKeys = Object.keys(this.UserMetadata) + headerKeys.forEach((key) => { + headerOptions[`X-Amz-Meta-${key}`] = this.UserMetadata[key] + }) + } + + if (!_.isEmpty(this.Encryption)) { + const encryptionHeaders = getEncryptionHeaders(this.Encryption) + Object.keys(encryptionHeaders).forEach((key) => { + headerOptions[key] = encryptionHeaders[key] + }) + } + return headerOptions + } + validate() { + if (!isValidBucketName(this.Bucket)) { + throw new errors.InvalidBucketNameError('Invalid Destination bucket name: ' + this.Bucket) + } + if (!isValidObjectName(this.Object)) { + throw new errors.InvalidObjectNameError(`Invalid Destination object name: ${this.Object}`) + } + if (!_.isEmpty(this.UserMetadata) && !isObject(this.UserMetadata)) { + throw new errors.InvalidObjectNameError(`Destination UserMetadata should be an object with key value pairs`) + } + + if (!_.isEmpty(this.Mode) && ![RETENTION_MODES.GOVERNANCE, RETENTION_MODES.COMPLIANCE].includes(this.Mode)) { + throw new errors.InvalidObjectNameError( + `Invalid Mode specified for destination object it should be one of [GOVERNANCE,COMPLIANCE]`, + ) + } + + if (!_.isEmpty(this.Encryption) && _.isEmpty(this.Encryption)) { + throw new errors.InvalidObjectNameError(`Invalid Encryption configuration for destination object `) + } + return true + } +} + +export const partsRequired = (size) => { + let maxPartSize = PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE / (PART_CONSTRAINTS.MAX_PARTS_COUNT - 1) + let 
requiredPartSize = size / maxPartSize + if (size % maxPartSize > 0) { + requiredPartSize++ + } + requiredPartSize = Math.trunc(requiredPartSize) + return requiredPartSize +} + +// calculateEvenSplits - computes splits for a source and returns +// start and end index slices. Splits happen evenly to be sure that no +// part is less than 5MiB, as that could fail the multipart request if +// it is not the last part. + +let startIndexParts = [] +let endIndexParts = [] +export function calculateEvenSplits(size, objInfo) { + if (size === 0) { + return null + } + const reqParts = partsRequired(size) + startIndexParts = new Array(reqParts) + endIndexParts = new Array(reqParts) + + let start = objInfo.Start + if (_.isEmpty(objInfo.Start) || start === -1) { + start = 0 + } + const divisorValue = Math.trunc(size / reqParts) + + const reminderValue = size % reqParts + + let nextStart = start + + for (let i = 0; i < reqParts; i++) { + let curPartSize = divisorValue + if (i < reminderValue) { + curPartSize++ + } + + const currentStart = nextStart + let currentEnd = currentStart + curPartSize - 1 + nextStart = currentEnd + 1 + + startIndexParts[i] = currentStart + endIndexParts[i] = currentEnd + } + + return { startIndex: startIndexParts, endIndex: endIndexParts, objInfo: objInfo } +} + +export function removeDirAndFiles(dirPath, removeSelf) { + if (removeSelf === undefined) { + removeSelf = true + } + try { + var files = fs.readdirSync(dirPath) + } catch (e) { + return + } + if (files.length > 0) { + for (var i = 0; i < files.length; i++) { + var filePath = path.join(dirPath, files[i]) + if (fs.statSync(filePath).isFile()) { + fs.unlinkSync(filePath) + } else { + removeDirAndFiles(filePath) + } + } + } + if (removeSelf) { + fs.rmdirSync(dirPath) + } +} + +export const parseXml = (xml) => { + let result = null + result = fxp.parse(xml) + if (result.Error) { + throw result.Error + } + + return result +} + +export class SelectResults { + constructor({ + records, // parsed data as 
stream + response, // original response stream + stats, // stats as xml + progress, // stats as xml + }) { + this.records = records + this.response = response + this.stats = stats + this.progress = progress + } + + setStats(stats) { + this.stats = stats + } + getStats() { + return this.stats + } + + setProgress(progress) { + this.progress = progress + } + getProgress() { + return this.progress + } + + setResponse(response) { + this.response = response + } + getResponse() { + return this.response + } + + setRecords(records) { + this.records = records + } + + getRecords() { + return this.records + } +} + +export const DEFAULT_REGION = 'us-east-1' diff --git a/src/minio.js b/src/minio.js new file mode 100644 index 00000000..7b3e05eb --- /dev/null +++ b/src/minio.js @@ -0,0 +1,3987 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import * as fs from 'node:fs' +import * as Http from 'node:http' +import * as Https from 'node:https' +import * as path from 'node:path' +import * as Stream from 'node:stream' + +import async from 'async' +import BlockStream2 from 'block-stream2' +import _ from 'lodash' +import mkdirp from 'mkdirp' +import * as querystring from 'query-string' +import { TextEncoder } from 'web-encoding' +import Xml from 'xml' +import xml2js from 'xml2js' + +import { CredentialProvider } from './CredentialProvider.js' +import * as errors from './errors.ts' +import { extensions } from './extensions.js' +import { + calculateEvenSplits, + CopyDestinationOptions, + CopySourceOptions, + DEFAULT_REGION, + extractMetadata, + getScope, + getSourceVersionId, + getVersionId, + insertContentType, + isAmazonEndpoint, + isArray, + isBoolean, + isFunction, + isNumber, + isObject, + isReadableStream, + isString, + isValidBucketName, + isValidDate, + isValidEndpoint, + isValidObjectName, + isValidPort, + isValidPrefix, + isVirtualHostStyle, + LEGAL_HOLD_STATUS, + makeDateLong, + PART_CONSTRAINTS, + partsRequired, + pipesetup, + prependXAMZMeta, + promisify, + readableStream, + RETENTION_MODES, + RETENTION_VALIDITY_UNITS, + sanitizeETag, + toMd5, + toSha256, + uriEscape, + uriResourceEscape, +} from './helpers.js' +import { NotificationConfig, NotificationPoller } from './notification.js' +import { ObjectUploader } from './object-uploader.js' +import { getS3Endpoint } from './s3-endpoints.js' +import { postPresignSignatureV4, presignSignatureV4, signV4 } from './signing.js' +import * as transformers from './transformers.js' +import { parseSelectObjectContentResponse } from './xml-parsers.js' + +// will be replaced by bundler +const Package = { version: process.env.MINIO_JS_PACKAGE_VERSION || 'development' } + +export * from './helpers.js' +export * from './notification.js' + +export class Client { + constructor(params) { + if (typeof params.secure !== 'undefined') { + throw new 
Error('"secure" option deprecated, "useSSL" should be used instead') + } + // Default values if not specified. + if (typeof params.useSSL === 'undefined') { + params.useSSL = true + } + if (!params.port) { + params.port = 0 + } + // Validate input params. + if (!isValidEndpoint(params.endPoint)) { + throw new errors.InvalidEndpointError(`Invalid endPoint : ${params.endPoint}`) + } + if (!isValidPort(params.port)) { + throw new errors.InvalidArgumentError(`Invalid port : ${params.port}`) + } + if (!isBoolean(params.useSSL)) { + throw new errors.InvalidArgumentError( + `Invalid useSSL flag type : ${params.useSSL}, expected to be of type "boolean"`, + ) + } + + // Validate region only if its set. + if (params.region) { + if (!isString(params.region)) { + throw new errors.InvalidArgumentError(`Invalid region : ${params.region}`) + } + } + + var host = params.endPoint.toLowerCase() + var port = params.port + var protocol = '' + var transport + var transportAgent + // Validate if configuration is not using SSL + // for constructing relevant endpoints. + if (params.useSSL === false) { + transport = Http + protocol = 'http:' + if (port === 0) { + port = 80 + } + transportAgent = Http.globalAgent + } else { + // Defaults to secure. + transport = Https + protocol = 'https:' + if (port === 0) { + port = 443 + } + transportAgent = Https.globalAgent + } + + // if custom transport is set, use it. + if (params.transport) { + if (!isObject(params.transport)) { + throw new errors.InvalidArgumentError( + `Invalid transport type : ${params.transport}, expected to be type "object"`, + ) + } + transport = params.transport + } + + // if custom transport agent is set, use it. + if (params.transportAgent) { + if (!isObject(params.transportAgent)) { + throw new errors.InvalidArgumentError( + `Invalid transportAgent type: ${params.transportAgent}, expected to be type "object"`, + ) + } + + transportAgent = params.transportAgent + } + + // User Agent should always following the below style. 
+ // Please open an issue to discuss any new changes here. + // + // MinIO (OS; ARCH) LIB/VER APP/VER + // + var libraryComments = `(${process.platform}; ${process.arch})` + var libraryAgent = `MinIO ${libraryComments} minio-js/${Package.version}` + // User agent block ends. + + this.transport = transport + this.transportAgent = transportAgent + this.host = host + this.port = port + this.protocol = protocol + this.accessKey = params.accessKey + this.secretKey = params.secretKey + this.sessionToken = params.sessionToken + this.userAgent = `${libraryAgent}` + + // Default path style is true + if (params.pathStyle === undefined) { + this.pathStyle = true + } else { + this.pathStyle = params.pathStyle + } + + if (!this.accessKey) { + this.accessKey = '' + } + if (!this.secretKey) { + this.secretKey = '' + } + this.anonymous = !this.accessKey || !this.secretKey + + if (params.credentialsProvider) { + this.credentialsProvider = params.credentialsProvider + this.checkAndRefreshCreds() + } + + this.regionMap = {} + if (params.region) { + this.region = params.region + } + + this.partSize = 64 * 1024 * 1024 + if (params.partSize) { + this.partSize = params.partSize + this.overRidePartSize = true + } + if (this.partSize < 5 * 1024 * 1024) { + throw new errors.InvalidArgumentError(`Part size should be greater than 5MB`) + } + if (this.partSize > 5 * 1024 * 1024 * 1024) { + throw new errors.InvalidArgumentError(`Part size should be less than 5GB`) + } + + this.maximumPartSize = 5 * 1024 * 1024 * 1024 + this.maxObjectSize = 5 * 1024 * 1024 * 1024 * 1024 + // SHA256 is enabled only for authenticated http requests. If the request is authenticated + // and the connection is https we use x-amz-content-sha256=UNSIGNED-PAYLOAD + // header for signature calculation. 
+ this.enableSHA256 = !this.anonymous && !params.useSSL + + this.s3AccelerateEndpoint = params.s3AccelerateEndpoint || null + this.reqOptions = {} + } + + // This is s3 Specific and does not hold validity in any other Object storage. + getAccelerateEndPointIfSet(bucketName, objectName) { + if (!_.isEmpty(this.s3AccelerateEndpoint) && !_.isEmpty(bucketName) && !_.isEmpty(objectName)) { + // http://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration.html + // Disable transfer acceleration for non-compliant bucket names. + if (bucketName.indexOf('.') !== -1) { + throw new Error(`Transfer Acceleration is not supported for non compliant bucket:${bucketName}`) + } + // If transfer acceleration is requested set new host. + // For more details about enabling transfer acceleration read here. + // http://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration.html + return this.s3AccelerateEndpoint + } + return false + } + + /** + * @param endPoint _string_ valid S3 acceleration end point + */ + setS3TransferAccelerate(endPoint) { + this.s3AccelerateEndpoint = endPoint + } + + // Sets the supported request options. 
+ setRequestOptions(options) { + if (!isObject(options)) { + throw new TypeError('request options should be of type "object"') + } + this.reqOptions = _.pick(options, [ + 'agent', + 'ca', + 'cert', + 'ciphers', + 'clientCertEngine', + 'crl', + 'dhparam', + 'ecdhCurve', + 'family', + 'honorCipherOrder', + 'key', + 'passphrase', + 'pfx', + 'rejectUnauthorized', + 'secureOptions', + 'secureProtocol', + 'servername', + 'sessionIdContext', + ]) + } + + // returns *options* object that can be used with http.request() + // Takes care of constructing virtual-host-style or path-style hostname + getRequestOptions(opts) { + var method = opts.method + var region = opts.region + var bucketName = opts.bucketName + var objectName = opts.objectName + var headers = opts.headers + var query = opts.query + + var reqOptions = { method } + reqOptions.headers = {} + + // If custom transportAgent was supplied earlier, we'll inject it here + reqOptions.agent = this.transportAgent + + // Verify if virtual host supported. + var virtualHostStyle + if (bucketName) { + virtualHostStyle = isVirtualHostStyle(this.host, this.protocol, bucketName, this.pathStyle) + } + + if (this.port) { + reqOptions.port = this.port + } + reqOptions.protocol = this.protocol + + if (objectName) { + objectName = `${uriResourceEscape(objectName)}` + } + + reqOptions.path = '/' + + // Save host. + reqOptions.host = this.host + // For Amazon S3 endpoint, get endpoint based on region. 
+ if (isAmazonEndpoint(reqOptions.host)) { + const accelerateEndPoint = this.getAccelerateEndPointIfSet(bucketName, objectName) + if (accelerateEndPoint) { + reqOptions.host = `${accelerateEndPoint}` + } else { + reqOptions.host = getS3Endpoint(region) + } + } + + if (virtualHostStyle && !opts.pathStyle) { + // For all hosts which support virtual host style, `bucketName` + // is part of the hostname in the following format: + // + // var host = 'bucketName.example.com' + // + if (bucketName) { + reqOptions.host = `${bucketName}.${reqOptions.host}` + } + if (objectName) { + reqOptions.path = `/${objectName}` + } + } else { + // For all S3 compatible storage services we will fallback to + // path style requests, where `bucketName` is part of the URI + // path. + if (bucketName) { + reqOptions.path = `/${bucketName}` + } + if (objectName) { + reqOptions.path = `/${bucketName}/${objectName}` + } + } + + if (query) { + reqOptions.path += `?${query}` + } + reqOptions.headers.host = reqOptions.host + if ( + (reqOptions.protocol === 'http:' && reqOptions.port !== 80) || + (reqOptions.protocol === 'https:' && reqOptions.port !== 443) + ) { + reqOptions.headers.host = `${reqOptions.host}:${reqOptions.port}` + } + reqOptions.headers['user-agent'] = this.userAgent + if (headers) { + // have all header keys in lower case - to make signing easy + _.map(headers, (v, k) => (reqOptions.headers[k.toLowerCase()] = v)) + } + + // Use any request option specified in minioClient.setRequestOptions() + reqOptions = Object.assign({}, this.reqOptions, reqOptions) + + return reqOptions + } + + // Set application specific information. + // + // Generates User-Agent in the following style. + // + // MinIO (OS; ARCH) LIB/VER APP/VER + // + // __Arguments__ + // * `appName` _string_ - Application name. + // * `appVersion` _string_ - Application version. 
+ setAppInfo(appName, appVersion) { + if (!isString(appName)) { + throw new TypeError(`Invalid appName: ${appName}`) + } + if (appName.trim() === '') { + throw new errors.InvalidArgumentError('Input appName cannot be empty.') + } + if (!isString(appVersion)) { + throw new TypeError(`Invalid appVersion: ${appVersion}`) + } + if (appVersion.trim() === '') { + throw new errors.InvalidArgumentError('Input appVersion cannot be empty.') + } + this.userAgent = `${this.userAgent} ${appName}/${appVersion}` + } + + // Calculate part size given the object size. Part size will be atleast this.partSize + calculatePartSize(size) { + if (!isNumber(size)) { + throw new TypeError('size should be of type "number"') + } + if (size > this.maxObjectSize) { + throw new TypeError(`size should not be more than ${this.maxObjectSize}`) + } + if (this.overRidePartSize) { + return this.partSize + } + var partSize = this.partSize + for (;;) { + // while(true) {...} throws linting error. + // If partSize is big enough to accomodate the object size, then use it. + if (partSize * 10000 > size) { + return partSize + } + // Try part sizes as 64MB, 80MB, 96MB etc. + partSize += 16 * 1024 * 1024 + } + } + + // log the request, response, error + logHTTP(reqOptions, response, err) { + // if no logstreamer available return. 
+ if (!this.logStream) { + return + } + if (!isObject(reqOptions)) { + throw new TypeError('reqOptions should be of type "object"') + } + if (response && !isReadableStream(response)) { + throw new TypeError('response should be of type "Stream"') + } + if (err && !(err instanceof Error)) { + throw new TypeError('err should be of type "Error"') + } + var logHeaders = (headers) => { + _.forEach(headers, (v, k) => { + if (k == 'authorization') { + var redacter = new RegExp('Signature=([0-9a-f]+)') + v = v.replace(redacter, 'Signature=**REDACTED**') + } + this.logStream.write(`${k}: ${v}\n`) + }) + this.logStream.write('\n') + } + this.logStream.write(`REQUEST: ${reqOptions.method} ${reqOptions.path}\n`) + logHeaders(reqOptions.headers) + if (response) { + this.logStream.write(`RESPONSE: ${response.statusCode}\n`) + logHeaders(response.headers) + } + if (err) { + this.logStream.write('ERROR BODY:\n') + var errJSON = JSON.stringify(err, null, '\t') + this.logStream.write(`${errJSON}\n`) + } + } + + // Enable tracing + traceOn(stream) { + if (!stream) { + stream = process.stdout + } + this.logStream = stream + } + + // Disable tracing + traceOff() { + this.logStream = null + } + + // makeRequest is the primitive used by the apis for making S3 requests. + // payload can be empty string in case of no payload. + // statusCode is the expected statusCode. If response.statusCode does not match + // we parse the XML error and call the callback with the error message. + // A valid region is passed by the calls - listBuckets, makeBucket and + // getBucketRegion. 
+ makeRequest(options, payload, statusCodes, region, returnResponse, cb) { + if (!isObject(options)) { + throw new TypeError('options should be of type "object"') + } + if (!isString(payload) && !isObject(payload)) { + // Buffer is of type 'object' + throw new TypeError('payload should be of type "string" or "Buffer"') + } + statusCodes.forEach((statusCode) => { + if (!isNumber(statusCode)) { + throw new TypeError('statusCode should be of type "number"') + } + }) + if (!isString(region)) { + throw new TypeError('region should be of type "string"') + } + if (!isBoolean(returnResponse)) { + throw new TypeError('returnResponse should be of type "boolean"') + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + if (!options.headers) { + options.headers = {} + } + if (options.method === 'POST' || options.method === 'PUT' || options.method === 'DELETE') { + options.headers['content-length'] = payload.length + } + var sha256sum = '' + if (this.enableSHA256) { + sha256sum = toSha256(payload) + } + var stream = readableStream(payload) + this.makeRequestStream(options, stream, sha256sum, statusCodes, region, returnResponse, cb) + } + + // makeRequestStream will be used directly instead of makeRequest in case the payload + // is available as a stream. for ex. 
putObject + makeRequestStream(options, stream, sha256sum, statusCodes, region, returnResponse, cb) { + if (!isObject(options)) { + throw new TypeError('options should be of type "object"') + } + if (!isReadableStream(stream)) { + throw new errors.InvalidArgumentError('stream should be a readable Stream') + } + if (!isString(sha256sum)) { + throw new TypeError('sha256sum should be of type "string"') + } + statusCodes.forEach((statusCode) => { + if (!isNumber(statusCode)) { + throw new TypeError('statusCode should be of type "number"') + } + }) + if (!isString(region)) { + throw new TypeError('region should be of type "string"') + } + if (!isBoolean(returnResponse)) { + throw new TypeError('returnResponse should be of type "boolean"') + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + // sha256sum will be empty for anonymous or https requests + if (!this.enableSHA256 && sha256sum.length !== 0) { + throw new errors.InvalidArgumentError(`sha256sum expected to be empty for anonymous or https requests`) + } + // sha256sum should be valid for non-anonymous http requests. + if (this.enableSHA256 && sha256sum.length !== 64) { + throw new errors.InvalidArgumentError(`Invalid sha256sum : ${sha256sum}`) + } + + var _makeRequest = (e, region) => { + if (e) { + return cb(e) + } + options.region = region + var reqOptions = this.getRequestOptions(options) + if (!this.anonymous) { + // For non-anonymous https requests sha256sum is 'UNSIGNED-PAYLOAD' for signature calculation. 
+ if (!this.enableSHA256) { + sha256sum = 'UNSIGNED-PAYLOAD' + } + + let date = new Date() + + reqOptions.headers['x-amz-date'] = makeDateLong(date) + reqOptions.headers['x-amz-content-sha256'] = sha256sum + if (this.sessionToken) { + reqOptions.headers['x-amz-security-token'] = this.sessionToken + } + + this.checkAndRefreshCreds() + var authorization = signV4(reqOptions, this.accessKey, this.secretKey, region, date) + reqOptions.headers.authorization = authorization + } + var req = this.transport.request(reqOptions, (response) => { + if (!statusCodes.includes(response.statusCode)) { + // For an incorrect region, S3 server always sends back 400. + // But we will do cache invalidation for all errors so that, + // in future, if AWS S3 decides to send a different status code or + // XML error code we will still work fine. + delete this.regionMap[options.bucketName] + var errorTransformer = transformers.getErrorTransformer(response) + pipesetup(response, errorTransformer).on('error', (e) => { + this.logHTTP(reqOptions, response, e) + cb(e) + }) + return + } + this.logHTTP(reqOptions, response) + if (returnResponse) { + return cb(null, response) + } + // We drain the socket so that the connection gets closed. Note that this + // is not expensive as the socket will not have any data. + response.on('data', () => {}) + cb(null) + }) + let pipe = pipesetup(stream, req) + pipe.on('error', (e) => { + this.logHTTP(reqOptions, null, e) + cb(e) + }) + } + if (region) { + return _makeRequest(null, region) + } + this.getBucketRegion(options.bucketName, _makeRequest) + } + + // gets the region of the bucket + getBucketRegion(bucketName, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name : ${bucketName}`) + } + if (!isFunction(cb)) { + throw new TypeError('cb should be of type "function"') + } + + // Region is set with constructor, return the region right here. 
+ if (this.region) { + return cb(null, this.region) + } + + if (this.regionMap[bucketName]) { + return cb(null, this.regionMap[bucketName]) + } + var extractRegion = (response) => { + var transformer = transformers.getBucketRegionTransformer() + var region = DEFAULT_REGION + pipesetup(response, transformer) + .on('error', cb) + .on('data', (data) => { + if (data) { + region = data + } + }) + .on('end', () => { + this.regionMap[bucketName] = region + cb(null, region) + }) + } + + var method = 'GET' + var query = 'location' + + // `getBucketLocation` behaves differently in following ways for + // different environments. + // + // - For nodejs env we default to path style requests. + // - For browser env path style requests on buckets yields CORS + // error. To circumvent this problem we make a virtual host + // style request signed with 'us-east-1'. This request fails + // with an error 'AuthorizationHeaderMalformed', additionally + // the error XML also provides Region of the bucket. To validate + // this region is proper we retry the same request with the newly + // obtained region. + var pathStyle = this.pathStyle && typeof window === 'undefined' + + this.makeRequest({ method, bucketName, query, pathStyle }, '', [200], DEFAULT_REGION, true, (e, response) => { + if (e) { + if (e.name === 'AuthorizationHeaderMalformed') { + var region = e.Region + if (!region) { + return cb(e) + } + this.makeRequest({ method, bucketName, query }, '', [200], region, true, (e, response) => { + if (e) { + return cb(e) + } + extractRegion(response) + }) + return + } + return cb(e) + } + extractRegion(response) + }) + } + + // Creates the bucket `bucketName`. + // + // __Arguments__ + // * `bucketName` _string_ - Name of the bucket + // * `region` _string_ - region valid values are _us-west-1_, _us-west-2_, _eu-west-1_, _eu-central-1_, _ap-southeast-1_, _ap-northeast-1_, _ap-southeast-2_, _sa-east-1_. + // * `makeOpts` _object_ - Options to create a bucket. 
e.g {ObjectLocking:true} (Optional) + // * `callback(err)` _function_ - callback function with `err` as the error argument. `err` is null if the bucket is successfully created. + makeBucket(bucketName, region, makeOpts = {}, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + // Backward Compatibility + if (isObject(region)) { + cb = makeOpts + makeOpts = region + region = '' + } + if (isFunction(region)) { + cb = region + region = '' + makeOpts = {} + } + if (isFunction(makeOpts)) { + cb = makeOpts + makeOpts = {} + } + + if (!isString(region)) { + throw new TypeError('region should be of type "string"') + } + if (!isObject(makeOpts)) { + throw new TypeError('makeOpts should be of type "object"') + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + var payload = '' + + // Region already set in constructor, validate if + // caller requested bucket location is same. + if (region && this.region) { + if (region !== this.region) { + throw new errors.InvalidArgumentError(`Configured region ${this.region}, requested ${region}`) + } + } + // sending makeBucket request with XML containing 'us-east-1' fails. 
For + // default region server expects the request without body + if (region && region !== DEFAULT_REGION) { + var createBucketConfiguration = [] + createBucketConfiguration.push({ + _attr: { + xmlns: 'http://s3.amazonaws.com/doc/2006-03-01/', + }, + }) + createBucketConfiguration.push({ + LocationConstraint: region, + }) + var payloadObject = { + CreateBucketConfiguration: createBucketConfiguration, + } + payload = Xml(payloadObject) + } + var method = 'PUT' + var headers = {} + + if (makeOpts.ObjectLocking) { + headers['x-amz-bucket-object-lock-enabled'] = true + } + + if (!region) { + region = DEFAULT_REGION + } + + const processWithRetry = (err) => { + if (err && (region === '' || region === DEFAULT_REGION)) { + if (err.code === 'AuthorizationHeaderMalformed' && err.region !== '') { + // Retry with region returned as part of error + this.makeRequest({ method, bucketName, headers }, payload, [200], err.region, false, cb) + } else { + return cb && cb(err) + } + } + return cb && cb(err) + } + this.makeRequest({ method, bucketName, headers }, payload, [200], region, false, processWithRetry) + } + + // List of buckets created. + // + // __Arguments__ + // * `callback(err, buckets)` _function_ - callback function with error as the first argument. 
`buckets` is an array of bucket information + // + // `buckets` array element: + // * `bucket.name` _string_ : bucket name + // * `bucket.creationDate` _Date_: date when bucket was created + listBuckets(cb) { + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + var method = 'GET' + this.makeRequest({ method }, '', [200], DEFAULT_REGION, true, (e, response) => { + if (e) { + return cb(e) + } + var transformer = transformers.getListBucketTransformer() + var buckets + pipesetup(response, transformer) + .on('data', (result) => (buckets = result)) + .on('error', (e) => cb(e)) + .on('end', () => cb(null, buckets)) + }) + } + + // Returns a stream that emits objects that are partially uploaded. + // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `prefix` _string_: prefix of the object names that are partially uploaded (optional, default `''`) + // * `recursive` _bool_: directory style listing when false, recursive listing when true (optional, default `false`) + // + // __Return Value__ + // * `stream` _Stream_ : emits objects of the format: + // * `object.key` _string_: name of the object + // * `object.uploadId` _string_: upload ID of the object + // * `object.size` _Integer_: size of the partially uploaded object + listIncompleteUploads(bucket, prefix, recursive) { + if (prefix === undefined) { + prefix = '' + } + if (recursive === undefined) { + recursive = false + } + if (!isValidBucketName(bucket)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucket) + } + if (!isValidPrefix(prefix)) { + throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) + } + if (!isBoolean(recursive)) { + throw new TypeError('recursive should be of type "boolean"') + } + var delimiter = recursive ? 
'' : '/' + var keyMarker = '' + var uploadIdMarker = '' + var uploads = [] + var ended = false + var readStream = Stream.Readable({ objectMode: true }) + readStream._read = () => { + // push one upload info per _read() + if (uploads.length) { + return readStream.push(uploads.shift()) + } + if (ended) { + return readStream.push(null) + } + this.listIncompleteUploadsQuery(bucket, prefix, keyMarker, uploadIdMarker, delimiter) + .on('error', (e) => readStream.emit('error', e)) + .on('data', (result) => { + result.prefixes.forEach((prefix) => uploads.push(prefix)) + async.eachSeries( + result.uploads, + (upload, cb) => { + // for each incomplete upload add the sizes of its uploaded parts + this.listParts(bucket, upload.key, upload.uploadId, (err, parts) => { + if (err) { + return cb(err) + } + upload.size = parts.reduce((acc, item) => acc + item.size, 0) + uploads.push(upload) + cb() + }) + }, + (err) => { + if (err) { + readStream.emit('error', err) + return + } + if (result.isTruncated) { + keyMarker = result.nextKeyMarker + uploadIdMarker = result.nextUploadIdMarker + } else { + ended = true + } + readStream._read() + }, + ) + }) + } + return readStream + } + + // To check if a bucket already exists. + // + // __Arguments__ + // * `bucketName` _string_ : name of the bucket + // * `callback(err)` _function_ : `err` is `null` if the bucket exists + bucketExists(bucketName, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + var method = 'HEAD' + this.makeRequest({ method, bucketName }, '', [200], '', false, (err) => { + if (err) { + if (err.code == 'NoSuchBucket' || err.code == 'NotFound') { + return cb(null, false) + } + return cb(err) + } + cb(null, true) + }) + } + + // Remove a bucket. 
+ // + // __Arguments__ + // * `bucketName` _string_ : name of the bucket + // * `callback(err)` _function_ : `err` is `null` if the bucket is removed successfully. + removeBucket(bucketName, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + var method = 'DELETE' + this.makeRequest({ method, bucketName }, '', [204], '', false, (e) => { + // If the bucket was successfully removed, remove the region map entry. + if (!e) { + delete this.regionMap[bucketName] + } + cb(e) + }) + } + + // Remove the partially uploaded object. + // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `objectName` _string_: name of the object + // * `callback(err)` _function_: callback function is called with non `null` value in case of error + removeIncompleteUpload(bucketName, objectName, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.isValidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + var removeUploadId + async.during( + (cb) => { + this.findUploadId(bucketName, objectName, (e, uploadId) => { + if (e) { + return cb(e) + } + removeUploadId = uploadId + cb(null, uploadId) + }) + }, + (cb) => { + var method = 'DELETE' + var query = `uploadId=${removeUploadId}` + this.makeRequest({ method, bucketName, objectName, query }, '', [204], '', false, (e) => cb(e)) + }, + cb, + ) + } + + // Callback is called with `error` in case of error or `null` in case of success + // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `objectName` _string_: name of the object + // * `filePath` _string_: path to which the object data will be written to 
+ // * `getOpts` _object_: Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional) + // * `callback(err)` _function_: callback is called with `err` in case of error. + fGetObject(bucketName, objectName, filePath, getOpts = {}, cb) { + // Input validation. + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isString(filePath)) { + throw new TypeError('filePath should be of type "string"') + } + // Backward Compatibility + if (isFunction(getOpts)) { + cb = getOpts + getOpts = {} + } + + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + // Internal data. + var partFile + var partFileStream + var objStat + + // Rename wrapper. + var rename = (err) => { + if (err) { + return cb(err) + } + fs.rename(partFile, filePath, cb) + } + + async.waterfall( + [ + (cb) => this.statObject(bucketName, objectName, getOpts, cb), + (result, cb) => { + objStat = result + // Create any missing top level directories. 
+ mkdirp(path.dirname(filePath), cb) + }, + (ignore, cb) => { + partFile = `${filePath}.${objStat.etag}.part.minio` + fs.stat(partFile, (e, stats) => { + var offset = 0 + if (e) { + partFileStream = fs.createWriteStream(partFile, { flags: 'w' }) + } else { + if (objStat.size === stats.size) { + return rename() + } + offset = stats.size + partFileStream = fs.createWriteStream(partFile, { flags: 'a' }) + } + this.getPartialObject(bucketName, objectName, offset, 0, getOpts, cb) + }) + }, + (downloadStream, cb) => { + pipesetup(downloadStream, partFileStream) + .on('error', (e) => cb(e)) + .on('finish', cb) + }, + (cb) => fs.stat(partFile, cb), + (stats, cb) => { + if (stats.size === objStat.size) { + return cb() + } + cb(new Error('Size mismatch between downloaded file and the object')) + }, + ], + rename, + ) + } + + // Callback is called with readable stream of the object content. + // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `objectName` _string_: name of the object + // * `getOpts` _object_: Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional) + // * `callback(err, stream)` _function_: callback is called with `err` in case of error. `stream` is the object content stream + getObject(bucketName, objectName, getOpts = {}, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + // Backward Compatibility + if (isFunction(getOpts)) { + cb = getOpts + getOpts = {} + } + + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + this.getPartialObject(bucketName, objectName, 0, 0, getOpts, cb) + } + + // Callback is called with readable stream of the partial object content. 
+ // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `objectName` _string_: name of the object + // * `offset` _number_: offset of the object from where the stream will start + // * `length` _number_: length of the object that will be read in the stream (optional, if not specified we read the rest of the file from the offset) + // * `getOpts` _object_: Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional) + // * `callback(err, stream)` _function_: callback is called with `err` in case of error. `stream` is the object content stream + getPartialObject(bucketName, objectName, offset, length, getOpts = {}, cb) { + if (isFunction(length)) { + cb = length + length = 0 + } + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isNumber(offset)) { + throw new TypeError('offset should be of type "number"') + } + if (!isNumber(length)) { + throw new TypeError('length should be of type "number"') + } + // Backward Compatibility + if (isFunction(getOpts)) { + cb = getOpts + getOpts = {} + } + + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + var range = '' + if (offset || length) { + if (offset) { + range = `bytes=${+offset}-` + } else { + range = 'bytes=0-' + offset = 0 + } + if (length) { + range += `${+length + offset - 1}` + } + } + + var headers = {} + if (range !== '') { + headers.range = range + } + + var expectedStatusCodes = [200] + if (range) { + expectedStatusCodes.push(206) + } + var method = 'GET' + + var query = querystring.stringify(getOpts) + this.makeRequest({ method, bucketName, objectName, headers, query }, '', expectedStatusCodes, '', true, cb) + } + + // Uploads the object using contents from a file + // + // __Arguments__ + // * `bucketName` _string_: 
name of the bucket + // * `objectName` _string_: name of the object + // * `filePath` _string_: file path of the file to be uploaded + // * `metaData` _Javascript Object_: metaData assosciated with the object + // * `callback(err, objInfo)` _function_: non null `err` indicates error, `objInfo` _object_ which contains versionId and etag. + fPutObject(bucketName, objectName, filePath, metaData, callback) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + if (!isString(filePath)) { + throw new TypeError('filePath should be of type "string"') + } + if (isFunction(metaData)) { + callback = metaData + metaData = {} // Set metaData empty if no metaData provided. + } + if (!isObject(metaData)) { + throw new TypeError('metaData should be of type "object"') + } + + // Inserts correct `content-type` attribute based on metaData and filePath + metaData = insertContentType(metaData, filePath) + + // Updates metaData to have the correct prefix if needed + metaData = prependXAMZMeta(metaData) + var size + var partSize + + async.waterfall( + [ + (cb) => fs.stat(filePath, cb), + (stats, cb) => { + size = stats.size + var stream + var cbTriggered = false + var origCb = cb + cb = function () { + if (cbTriggered) { + return + } + cbTriggered = true + if (stream) { + stream.destroy() + } + return origCb.apply(this, arguments) + } + if (size > this.maxObjectSize) { + return cb(new Error(`${filePath} size : ${stats.size}, max allowed size : 5TB`)) + } + if (size <= this.partSize) { + // simple PUT request, no multipart + var multipart = false + var uploader = this.getUploader(bucketName, objectName, metaData, multipart) + var hash = transformers.getHashSummer(this.enableSHA256) + var start = 0 + var end = size - 1 + var autoClose = true + if (size === 0) { + end = 0 + } + var options = { 
start, end, autoClose } + pipesetup(fs.createReadStream(filePath, options), hash) + .on('data', (data) => { + var md5sum = data.md5sum + var sha256sum = data.sha256sum + stream = fs.createReadStream(filePath, options) + uploader(stream, size, sha256sum, md5sum, (err, objInfo) => { + callback(err, objInfo) + cb(true) + }) + }) + .on('error', (e) => cb(e)) + return + } + this.findUploadId(bucketName, objectName, cb) + }, + (uploadId, cb) => { + // if there was a previous incomplete upload, fetch all its uploaded parts info + if (uploadId) { + return this.listParts(bucketName, objectName, uploadId, (e, etags) => cb(e, uploadId, etags)) + } + // there was no previous upload, initiate a new one + this.initiateNewMultipartUpload(bucketName, objectName, metaData, (e, uploadId) => cb(e, uploadId, [])) + }, + (uploadId, etags, cb) => { + partSize = this.calculatePartSize(size) + var multipart = true + var uploader = this.getUploader(bucketName, objectName, metaData, multipart) + + // convert array to object to make things easy + var parts = etags.reduce(function (acc, item) { + if (!acc[item.part]) { + acc[item.part] = item + } + return acc + }, {}) + var partsDone = [] + var partNumber = 1 + var uploadedSize = 0 + async.whilst( + (cb) => { + cb(null, uploadedSize < size) + }, + (cb) => { + var stream + var cbTriggered = false + var origCb = cb + cb = function () { + if (cbTriggered) { + return + } + cbTriggered = true + if (stream) { + stream.destroy() + } + return origCb.apply(this, arguments) + } + var part = parts[partNumber] + var hash = transformers.getHashSummer(this.enableSHA256) + var length = partSize + if (length > size - uploadedSize) { + length = size - uploadedSize + } + var start = uploadedSize + var end = uploadedSize + length - 1 + var autoClose = true + var options = { autoClose, start, end } + // verify md5sum of each part + pipesetup(fs.createReadStream(filePath, options), hash) + .on('data', (data) => { + var md5sumHex = Buffer.from(data.md5sum, 
'base64').toString('hex') + if (part && md5sumHex === part.etag) { + // md5 matches, chunk already uploaded + partsDone.push({ part: partNumber, etag: part.etag }) + partNumber++ + uploadedSize += length + return cb() + } + // part is not uploaded yet, or md5 mismatch + stream = fs.createReadStream(filePath, options) + uploader(uploadId, partNumber, stream, length, data.sha256sum, data.md5sum, (e, objInfo) => { + if (e) { + return cb(e) + } + partsDone.push({ part: partNumber, etag: objInfo.etag }) + partNumber++ + uploadedSize += length + return cb() + }) + }) + .on('error', (e) => cb(e)) + }, + (e) => { + if (e) { + return cb(e) + } + cb(null, partsDone, uploadId) + }, + ) + }, + // all parts uploaded, complete the multipart upload + (etags, uploadId, cb) => this.completeMultipartUpload(bucketName, objectName, uploadId, etags, cb), + ], + (err, ...rest) => { + if (err === true) { + return + } + callback(err, ...rest) + }, + ) + } + + // Uploads the object. + // + // Uploading a stream + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `objectName` _string_: name of the object + // * `stream` _Stream_: Readable stream + // * `size` _number_: size of the object (optional) + // * `callback(err, etag)` _function_: non null `err` indicates error, `etag` _string_ is the etag of the object uploaded. 
+  //
+  // Uploading "Buffer" or "string"
+  // __Arguments__
+  // * `bucketName` _string_: name of the bucket
+  // * `objectName` _string_: name of the object
+  // * `string or Buffer` _string_ or _Buffer_: string or buffer
+  // * `callback(err, objInfo)` _function_: `err` is `null` in case of success and `info` will have the following object details:
+  //   * `etag` _string_: etag of the object
+  //   * `versionId` _string_: versionId of the object
+  putObject(bucketName, objectName, stream, size, metaData, callback) {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+
+    // We'll need to shift arguments to the left because of size and metaData.
+    if (isFunction(size)) {
+      callback = size
+      metaData = {}
+    } else if (isFunction(metaData)) {
+      callback = metaData
+      metaData = {}
+    }
+
+    // We'll need to shift arguments to the left because of metaData
+    // and size being optional.
+    if (isObject(size)) {
+      metaData = size
+    }
+
+    // Ensures Metadata has appropriate prefix for S3 API
+    metaData = prependXAMZMeta(metaData)
+    if (typeof stream === 'string' || stream instanceof Buffer) {
+      // Adapts the non-stream interface into a stream.
+      size = stream.length
+      stream = readableStream(stream)
+    } else if (!isReadableStream(stream)) {
+      throw new TypeError('third argument should be of type "stream.Readable" or "Buffer" or "string"')
+    }
+
+    if (!isFunction(callback)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+
+    if (isNumber(size) && size < 0) {
+      throw new errors.InvalidArgumentError(`size cannot be negative, given size: ${size}`)
+    }
+
+    // Get the part size and forward that to the BlockStream. Default to the
+    // largest block size possible if necessary.
+    if (!isNumber(size)) {
+      size = this.maxObjectSize
+    }
+
+    size = this.calculatePartSize(size)
+
+    // s3 requires that all non-end chunks be at least `this.partSize`,
+    // so we chunk the stream until we hit either that size or the end before
+    // we flush it to s3.
+    let chunker = new BlockStream2({ size, zeroPadding: false })
+
+    // This is a Writable stream that can be written to in order to upload
+    // to the specified bucket and object automatically.
+    let uploader = new ObjectUploader(this, bucketName, objectName, size, metaData, callback)
+    // stream => chunker => uploader
+    pipesetup(stream, chunker, uploader)
+  }
+
+  // Copy the object.
+  //
+  // __Arguments__
+  // * `bucketName` _string_: name of the bucket
+  // * `objectName` _string_: name of the object
+  // * `srcObject` _string_: path of the source object to be copied
+  // * `conditions` _CopyConditions_: copy conditions that needs to be satisfied (optional, default `null`)
+  // * `callback(err, {etag, lastModified})` _function_: non null `err` indicates error, `etag` _string_ and `lastModified` _Date_ are respectively the etag and the last modified date of the newly copied object
+  copyObjectV1(arg1, arg2, arg3, arg4, arg5) {
+    var bucketName = arg1
+    var objectName = arg2
+    var srcObject = arg3
+    var conditions, cb
+    if (typeof arg4 == 'function' && arg5 === undefined) {
+      conditions = null
+      cb = arg4
+    } else {
+      conditions = arg4
+      cb = arg5
+    }
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+    if (!isString(srcObject)) {
+      throw new TypeError('srcObject should be of type "string"')
+    }
+    if (srcObject === '') {
+      throw new errors.InvalidPrefixError(`Empty source prefix`)
+    }
+
+    if (conditions !== null && !(conditions instanceof CopyConditions)) {
+      throw new TypeError('conditions should be of type "CopyConditions"')
+    }
+
+    var headers = {}
+    headers['x-amz-copy-source'] = uriResourceEscape(srcObject)
+
+    if (conditions !== null) {
+      if (conditions.modified !== '') {
+        headers['x-amz-copy-source-if-modified-since'] = conditions.modified
+      }
+      if (conditions.unmodified !== '') {
+        headers['x-amz-copy-source-if-unmodified-since'] = conditions.unmodified
+      }
+      if (conditions.matchETag !== '') {
+        headers['x-amz-copy-source-if-match'] = conditions.matchETag
+      }
+      if (conditions.matchEtagExcept !== '') {
+        headers['x-amz-copy-source-if-none-match'] = conditions.matchETagExcept
+      }
+    }
+
+    var method = 'PUT'
+    this.makeRequest({ method, bucketName, objectName, headers }, '', [200], '', true, (e, response) => {
+      if (e) {
+        return cb(e)
+      }
+      var transformer = transformers.getCopyObjectTransformer()
+      pipesetup(response, transformer)
+        .on('error', (e) => cb(e))
+        .on('data', (data) => cb(null, data))
+    })
+  }
+
+  /**
+   * Internal Method to perform copy of an object.
+   * @param sourceConfig __object__ instance of CopySourceOptions @link ./helpers/CopySourceOptions
+   * @param destConfig __object__ instance of CopyDestinationOptions @link ./helpers/CopyDestinationOptions
+   * @param cb __function__ called with null if there is an error
+   * @returns Promise if no callback is passed.
+ */ + copyObjectV2(sourceConfig, destConfig, cb) { + if (!(sourceConfig instanceof CopySourceOptions)) { + throw new errors.InvalidArgumentError('sourceConfig should of type CopySourceOptions ') + } + if (!(destConfig instanceof CopyDestinationOptions)) { + throw new errors.InvalidArgumentError('destConfig should of type CopyDestinationOptions ') + } + if (!destConfig.validate()) { + return false + } + if (!destConfig.validate()) { + return false + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + const headers = Object.assign({}, sourceConfig.getHeaders(), destConfig.getHeaders()) + + const bucketName = destConfig.Bucket + const objectName = destConfig.Object + + const method = 'PUT' + this.makeRequest({ method, bucketName, objectName, headers }, '', [200], '', true, (e, response) => { + if (e) { + return cb(e) + } + const transformer = transformers.getCopyObjectTransformer() + pipesetup(response, transformer) + .on('error', (e) => cb(e)) + .on('data', (data) => { + const resHeaders = response.headers + + const copyObjResponse = { + Bucket: destConfig.Bucket, + Key: destConfig.Object, + LastModified: data.LastModified, + MetaData: extractMetadata(resHeaders), + VersionId: getVersionId(resHeaders), + SourceVersionId: getSourceVersionId(resHeaders), + Etag: sanitizeETag(resHeaders.etag), + Size: +resHeaders['content-length'], + } + + return cb(null, copyObjResponse) + }) + }) + } + + // Backward compatibility for Copy Object API. 
+ copyObject(...allArgs) { + if (allArgs[0] instanceof CopySourceOptions && allArgs[1] instanceof CopyDestinationOptions) { + return this.copyObjectV2(...arguments) + } + return this.copyObjectV1(...arguments) + } + + // list a batch of objects + listObjectsQuery(bucketName, prefix, marker, listQueryOpts = {}) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isString(prefix)) { + throw new TypeError('prefix should be of type "string"') + } + if (!isString(marker)) { + throw new TypeError('marker should be of type "string"') + } + let { Delimiter, MaxKeys, IncludeVersion } = listQueryOpts + + if (!isObject(listQueryOpts)) { + throw new TypeError('listQueryOpts should be of type "object"') + } + + if (!isString(Delimiter)) { + throw new TypeError('Delimiter should be of type "string"') + } + if (!isNumber(MaxKeys)) { + throw new TypeError('MaxKeys should be of type "number"') + } + + const queries = [] + // escape every value in query string, except maxKeys + queries.push(`prefix=${uriEscape(prefix)}`) + queries.push(`delimiter=${uriEscape(Delimiter)}`) + queries.push(`encoding-type=url`) + + if (IncludeVersion) { + queries.push(`versions`) + } + + if (marker) { + marker = uriEscape(marker) + if (IncludeVersion) { + queries.push(`key-marker=${marker}`) + } else { + queries.push(`marker=${marker}`) + } + } + + // no need to escape maxKeys + if (MaxKeys) { + if (MaxKeys >= 1000) { + MaxKeys = 1000 + } + queries.push(`max-keys=${MaxKeys}`) + } + queries.sort() + var query = '' + if (queries.length > 0) { + query = `${queries.join('&')}` + } + + var method = 'GET' + var transformer = transformers.getListObjectsTransformer() + this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { + if (e) { + return transformer.emit('error', e) + } + pipesetup(response, transformer) + }) + return transformer + } + + // List the objects in the bucket. 
+ // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `prefix` _string_: the prefix of the objects that should be listed (optional, default `''`) + // * `recursive` _bool_: `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. (optional, default `false`) + // * `listOpts _object_: query params to list object with below keys + // * listOpts.MaxKeys _int_ maximum number of keys to return + // * listOpts.IncludeVersion _bool_ true|false to include versions. + // __Return Value__ + // * `stream` _Stream_: stream emitting the objects in the bucket, the object is of the format: + // * `obj.name` _string_: name of the object + // * `obj.prefix` _string_: name of the object prefix + // * `obj.size` _number_: size of the object + // * `obj.etag` _string_: etag of the object + // * `obj.lastModified` _Date_: modified time stamp + // * `obj.isDeleteMarker` _boolean_: true if it is a delete marker + // * `obj.versionId` _string_: versionId of the object + listObjects(bucketName, prefix, recursive, listOpts = {}) { + if (prefix === undefined) { + prefix = '' + } + if (recursive === undefined) { + recursive = false + } + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidPrefix(prefix)) { + throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) + } + if (!isString(prefix)) { + throw new TypeError('prefix should be of type "string"') + } + if (!isBoolean(recursive)) { + throw new TypeError('recursive should be of type "boolean"') + } + if (!isObject(listOpts)) { + throw new TypeError('listOpts should be of type "object"') + } + var marker = '' + const listQueryOpts = { + Delimiter: recursive ? 
'' : '/', // if recursive is false set delimiter to '/' + MaxKeys: 1000, + IncludeVersion: listOpts.IncludeVersion, + } + var objects = [] + var ended = false + var readStream = Stream.Readable({ objectMode: true }) + readStream._read = () => { + // push one object per _read() + if (objects.length) { + readStream.push(objects.shift()) + return + } + if (ended) { + return readStream.push(null) + } + // if there are no objects to push do query for the next batch of objects + this.listObjectsQuery(bucketName, prefix, marker, listQueryOpts) + .on('error', (e) => readStream.emit('error', e)) + .on('data', (result) => { + if (result.isTruncated) { + marker = result.nextMarker || result.versionIdMarker + } else { + ended = true + } + objects = result.objects + readStream._read() + }) + } + return readStream + } + + // listObjectsV2Query - (List Objects V2) - List some or all (up to 1000) of the objects in a bucket. + // + // You can use the request parameters as selection criteria to return a subset of the objects in a bucket. + // request parameters :- + // * `bucketName` _string_: name of the bucket + // * `prefix` _string_: Limits the response to keys that begin with the specified prefix. + // * `continuation-token` _string_: Used to continue iterating over a set of objects. + // * `delimiter` _string_: A delimiter is a character you use to group keys. + // * `max-keys` _number_: Sets the maximum number of keys returned in the response body. + // * `start-after` _string_: Specifies the key to start after when listing objects in a bucket. 
+ listObjectsV2Query(bucketName, prefix, continuationToken, delimiter, maxKeys, startAfter) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isString(prefix)) { + throw new TypeError('prefix should be of type "string"') + } + if (!isString(continuationToken)) { + throw new TypeError('continuationToken should be of type "string"') + } + if (!isString(delimiter)) { + throw new TypeError('delimiter should be of type "string"') + } + if (!isNumber(maxKeys)) { + throw new TypeError('maxKeys should be of type "number"') + } + if (!isString(startAfter)) { + throw new TypeError('startAfter should be of type "string"') + } + var queries = [] + + // Call for listing objects v2 API + queries.push(`list-type=2`) + queries.push(`encoding-type=url`) + + // escape every value in query string, except maxKeys + queries.push(`prefix=${uriEscape(prefix)}`) + queries.push(`delimiter=${uriEscape(delimiter)}`) + + if (continuationToken) { + continuationToken = uriEscape(continuationToken) + queries.push(`continuation-token=${continuationToken}`) + } + // Set start-after + if (startAfter) { + startAfter = uriEscape(startAfter) + queries.push(`start-after=${startAfter}`) + } + // no need to escape maxKeys + if (maxKeys) { + if (maxKeys >= 1000) { + maxKeys = 1000 + } + queries.push(`max-keys=${maxKeys}`) + } + queries.sort() + var query = '' + if (queries.length > 0) { + query = `${queries.join('&')}` + } + var method = 'GET' + var transformer = transformers.getListObjectsV2Transformer() + this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { + if (e) { + return transformer.emit('error', e) + } + pipesetup(response, transformer) + }) + return transformer + } + + // List the objects in the bucket using S3 ListObjects V2 + // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `prefix` _string_: the prefix of the objects that should be listed 
(optional, default `''`) + // * `recursive` _bool_: `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. (optional, default `false`) + // * `startAfter` _string_: Specifies the key to start after when listing objects in a bucket. (optional, default `''`) + // + // __Return Value__ + // * `stream` _Stream_: stream emitting the objects in the bucket, the object is of the format: + // * `obj.name` _string_: name of the object + // * `obj.prefix` _string_: name of the object prefix + // * `obj.size` _number_: size of the object + // * `obj.etag` _string_: etag of the object + // * `obj.lastModified` _Date_: modified time stamp + listObjectsV2(bucketName, prefix, recursive, startAfter) { + if (prefix === undefined) { + prefix = '' + } + if (recursive === undefined) { + recursive = false + } + if (startAfter === undefined) { + startAfter = '' + } + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidPrefix(prefix)) { + throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) + } + if (!isString(prefix)) { + throw new TypeError('prefix should be of type "string"') + } + if (!isBoolean(recursive)) { + throw new TypeError('recursive should be of type "boolean"') + } + if (!isString(startAfter)) { + throw new TypeError('startAfter should be of type "string"') + } + // if recursive is false set delimiter to '/' + var delimiter = recursive ? 
'' : '/' + var continuationToken = '' + var objects = [] + var ended = false + var readStream = Stream.Readable({ objectMode: true }) + readStream._read = () => { + // push one object per _read() + if (objects.length) { + readStream.push(objects.shift()) + return + } + if (ended) { + return readStream.push(null) + } + // if there are no objects to push do query for the next batch of objects + this.listObjectsV2Query(bucketName, prefix, continuationToken, delimiter, 1000, startAfter) + .on('error', (e) => readStream.emit('error', e)) + .on('data', (result) => { + if (result.isTruncated) { + continuationToken = result.nextContinuationToken + } else { + ended = true + } + objects = result.objects + readStream._read() + }) + } + return readStream + } + + // Stat information of the object. + // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `objectName` _string_: name of the object + // * `statOpts` _object_ : Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional). 
+ // * `callback(err, stat)` _function_: `err` is not `null` in case of error, `stat` contains the object information: + // * `stat.size` _number_: size of the object + // * `stat.etag` _string_: etag of the object + // * `stat.metaData` _string_: MetaData of the object + // * `stat.lastModified` _Date_: modified time stamp + // * `stat.versionId` _string_: version id of the object if available + statObject(bucketName, objectName, statOpts = {}, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + // backward compatibility + if (isFunction(statOpts)) { + cb = statOpts + statOpts = {} + } + + if (!isObject(statOpts)) { + throw new errors.InvalidArgumentError('statOpts should be of type "object"') + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + var query = querystring.stringify(statOpts) + var method = 'HEAD' + this.makeRequest({ method, bucketName, objectName, query }, '', [200], '', true, (e, response) => { + if (e) { + return cb(e) + } + + // We drain the socket so that the connection gets closed. Note that this + // is not expensive as the socket will not have any data. + response.on('data', () => {}) + + const result = { + size: +response.headers['content-length'], + metaData: extractMetadata(response.headers), + lastModified: new Date(response.headers['last-modified']), + versionId: getVersionId(response.headers), + etag: sanitizeETag(response.headers.etag), + } + + cb(null, result) + }) + } + + // Remove the specified object. + // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `objectName` _string_: name of the object + // * `removeOpts` _object_: Version of the object in the form `{versionId:'my-uuid', governanceBypass:true|false, forceDelete:true|false}`. Default is `{}`. 
(optional) + // * `callback(err)` _function_: callback function is called with non `null` value in case of error + removeObject(bucketName, objectName, removeOpts = {}, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + // backward compatibility + if (isFunction(removeOpts)) { + cb = removeOpts + removeOpts = {} + } + + if (!isObject(removeOpts)) { + throw new errors.InvalidArgumentError('removeOpts should be of type "object"') + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + const method = 'DELETE' + const queryParams = {} + + if (removeOpts.versionId) { + queryParams.versionId = `${removeOpts.versionId}` + } + const headers = {} + if (removeOpts.governanceBypass) { + headers['X-Amz-Bypass-Governance-Retention'] = true + } + if (removeOpts.forceDelete) { + headers['x-minio-force-delete'] = true + } + + const query = querystring.stringify(queryParams) + + let requestOptions = { method, bucketName, objectName, headers } + if (query) { + requestOptions['query'] = query + } + + this.makeRequest(requestOptions, '', [200, 204], '', false, cb) + } + + // Remove all the objects residing in the objectsList. 
+ // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `objectsList` _array_: array of objects of one of the following: + // * List of Object names as array of strings which are object keys: ['objectname1','objectname2'] + // * List of Object name and versionId as an object: [{name:"objectname",versionId:"my-version-id"}] + + removeObjects(bucketName, objectsList, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isArray(objectsList)) { + throw new errors.InvalidArgumentError('objectsList should be a list') + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + const maxEntries = 1000 + const query = 'delete' + const method = 'POST' + + let result = objectsList.reduce( + (result, entry) => { + result.list.push(entry) + if (result.list.length === maxEntries) { + result.listOfList.push(result.list) + result.list = [] + } + return result + }, + { listOfList: [], list: [] }, + ) + + if (result.list.length > 0) { + result.listOfList.push(result.list) + } + + const encoder = new TextEncoder() + const batchResults = [] + + async.eachSeries( + result.listOfList, + (list, batchCb) => { + var objects = [] + list.forEach(function (value) { + if (isObject(value)) { + objects.push({ Key: value.name, VersionId: value.versionId }) + } else { + objects.push({ Key: value }) + } + }) + let deleteObjects = { Delete: { Quiet: true, Object: objects } } + const builder = new xml2js.Builder({ headless: true }) + let payload = builder.buildObject(deleteObjects) + payload = encoder.encode(payload) + const headers = {} + + headers['Content-MD5'] = toMd5(payload) + + let removeObjectsResult + this.makeRequest({ method, bucketName, query, headers }, payload, [200], '', true, (e, response) => { + if (e) { + return batchCb(e) + } + pipesetup(response, transformers.removeObjectsTransformer()) + .on('data', (data) => { + removeObjectsResult = 
data + }) + .on('error', (e) => { + return batchCb(e, null) + }) + .on('end', () => { + batchResults.push(removeObjectsResult) + return batchCb(null, removeObjectsResult) + }) + }) + }, + () => { + cb(null, _.flatten(batchResults)) + }, + ) + } + + // Get the policy on a bucket or an object prefix. + // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `callback(err, policy)` _function_: callback function + getBucketPolicy(bucketName, cb) { + // Validate arguments. + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + let method = 'GET' + let query = 'policy' + this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { + if (e) { + return cb(e) + } + + let policy = Buffer.from('') + pipesetup(response, transformers.getConcater()) + .on('data', (data) => (policy = data)) + .on('error', cb) + .on('end', () => { + cb(null, policy.toString()) + }) + }) + } + + // Set the policy on a bucket or an object prefix. + // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `bucketPolicy` _string_: bucket policy (JSON stringify'ed) + // * `callback(err)` _function_: callback function + setBucketPolicy(bucketName, policy, cb) { + // Validate arguments. 
+ if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) + } + if (!isString(policy)) { + throw new errors.InvalidBucketPolicyError(`Invalid bucket policy: ${policy} - must be "string"`) + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + let method = 'DELETE' + let query = 'policy' + + if (policy) { + method = 'PUT' + } + + this.makeRequest({ method, bucketName, query }, policy, [204], '', false, cb) + } + + // Generate a generic presigned URL which can be + // used for HTTP methods GET, PUT, HEAD and DELETE + // + // __Arguments__ + // * `method` _string_: name of the HTTP method + // * `bucketName` _string_: name of the bucket + // * `objectName` _string_: name of the object + // * `expiry` _number_: expiry in seconds (optional, default 7 days) + // * `reqParams` _object_: request parameters (optional) e.g {versionId:"10fa9946-3f64-4137-a58f-888065c0732e"} + // * `requestDate` _Date_: A date object, the url will be issued at (optional) + presignedUrl(method, bucketName, objectName, expires, reqParams, requestDate, cb) { + if (this.anonymous) { + throw new errors.AnonymousRequestError('Presigned ' + method + ' url cannot be generated for anonymous requests') + } + if (isFunction(requestDate)) { + cb = requestDate + requestDate = new Date() + } + if (isFunction(reqParams)) { + cb = reqParams + reqParams = {} + requestDate = new Date() + } + if (isFunction(expires)) { + cb = expires + reqParams = {} + expires = 24 * 60 * 60 * 7 // 7 days in seconds + requestDate = new Date() + } + if (!isNumber(expires)) { + throw new TypeError('expires should be of type "number"') + } + if (!isObject(reqParams)) { + throw new TypeError('reqParams should be of type "object"') + } + if (!isValidDate(requestDate)) { + throw new TypeError('requestDate should be of type "Date" and valid') + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type 
"function"') + } + var query = querystring.stringify(reqParams) + this.getBucketRegion(bucketName, (e, region) => { + if (e) { + return cb(e) + } + // This statement is added to ensure that we send error through + // callback on presign failure. + var url + var reqOptions = this.getRequestOptions({ method, region, bucketName, objectName, query }) + + this.checkAndRefreshCreds() + try { + url = presignSignatureV4( + reqOptions, + this.accessKey, + this.secretKey, + this.sessionToken, + region, + requestDate, + expires, + ) + } catch (pe) { + return cb(pe) + } + cb(null, url) + }) + } + + // Generate a presigned URL for GET + // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `objectName` _string_: name of the object + // * `expiry` _number_: expiry in seconds (optional, default 7 days) + // * `respHeaders` _object_: response headers to override or request params for query (optional) e.g {versionId:"10fa9946-3f64-4137-a58f-888065c0732e"} + // * `requestDate` _Date_: A date object, the url will be issued at (optional) + presignedGetObject(bucketName, objectName, expires, respHeaders, requestDate, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + if (isFunction(respHeaders)) { + cb = respHeaders + respHeaders = {} + requestDate = new Date() + } + + var validRespHeaders = [ + 'response-content-type', + 'response-content-language', + 'response-expires', + 'response-cache-control', + 'response-content-disposition', + 'response-content-encoding', + ] + validRespHeaders.forEach((header) => { + if (respHeaders !== undefined && respHeaders[header] !== undefined && !isString(respHeaders[header])) { + throw new TypeError(`response header ${header} should be of type "string"`) + } + }) + return this.presignedUrl('GET', bucketName, objectName, 
expires, respHeaders, requestDate, cb) + } + + // Generate a presigned URL for PUT. Using this URL, the browser can upload to S3 only with the specified object name. + // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `objectName` _string_: name of the object + // * `expiry` _number_: expiry in seconds (optional, default 7 days) + presignedPutObject(bucketName, objectName, expires, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + return this.presignedUrl('PUT', bucketName, objectName, expires, cb) + } + + // return PostPolicy object + newPostPolicy() { + return new PostPolicy() + } + + // presignedPostPolicy can be used in situations where we want more control on the upload than what + // presignedPutObject() provides. i.e Using presignedPostPolicy we will be able to put policy restrictions + // on the object's `name` `bucket` `expiry` `Content-Type` `Content-Disposition` `metaData` + presignedPostPolicy(postPolicy, cb) { + if (this.anonymous) { + throw new errors.AnonymousRequestError('Presigned POST policy cannot be generated for anonymous requests') + } + if (!isObject(postPolicy)) { + throw new TypeError('postPolicy should be of type "object"') + } + if (!isFunction(cb)) { + throw new TypeError('cb should be of type "function"') + } + this.getBucketRegion(postPolicy.formData.bucket, (e, region) => { + if (e) { + return cb(e) + } + var date = new Date() + var dateStr = makeDateLong(date) + + this.checkAndRefreshCreds() + + if (!postPolicy.policy.expiration) { + // 'expiration' is mandatory field for S3. + // Set default expiration date of 7 days. 
+ var expires = new Date() + expires.setSeconds(24 * 60 * 60 * 7) + postPolicy.setExpires(expires) + } + + postPolicy.policy.conditions.push(['eq', '$x-amz-date', dateStr]) + postPolicy.formData['x-amz-date'] = dateStr + + postPolicy.policy.conditions.push(['eq', '$x-amz-algorithm', 'AWS4-HMAC-SHA256']) + postPolicy.formData['x-amz-algorithm'] = 'AWS4-HMAC-SHA256' + + postPolicy.policy.conditions.push(['eq', '$x-amz-credential', this.accessKey + '/' + getScope(region, date)]) + postPolicy.formData['x-amz-credential'] = this.accessKey + '/' + getScope(region, date) + + if (this.sessionToken) { + postPolicy.policy.conditions.push(['eq', '$x-amz-security-token', this.sessionToken]) + postPolicy.formData['x-amz-security-token'] = this.sessionToken + } + + var policyBase64 = Buffer.from(JSON.stringify(postPolicy.policy)).toString('base64') + + postPolicy.formData.policy = policyBase64 + + var signature = postPresignSignatureV4(region, date, this.secretKey, policyBase64) + + postPolicy.formData['x-amz-signature'] = signature + var opts = {} + opts.region = region + opts.bucketName = postPolicy.formData.bucket + var reqOptions = this.getRequestOptions(opts) + var portStr = this.port == 80 || this.port === 443 ? '' : `:${this.port.toString()}` + var urlStr = `${reqOptions.protocol}//${reqOptions.host}${portStr}${reqOptions.path}` + cb(null, { postURL: urlStr, formData: postPolicy.formData }) + }) + } + + // Calls implemented below are related to multipart. + + // Initiate a new multipart upload. 
+ initiateNewMultipartUpload(bucketName, objectName, metaData, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isObject(metaData)) { + throw new errors.InvalidObjectNameError('contentType should be of type "object"') + } + var method = 'POST' + let headers = Object.assign({}, metaData) + var query = 'uploads' + this.makeRequest({ method, bucketName, objectName, query, headers }, '', [200], '', true, (e, response) => { + if (e) { + return cb(e) + } + var transformer = transformers.getInitiateMultipartTransformer() + pipesetup(response, transformer) + .on('error', (e) => cb(e)) + .on('data', (uploadId) => cb(null, uploadId)) + }) + } + + // Complete the multipart upload. After all the parts are uploaded issuing + // this call will aggregate the parts on the server into a single object. 
+ completeMultipartUpload(bucketName, objectName, uploadId, etags, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isString(uploadId)) { + throw new TypeError('uploadId should be of type "string"') + } + if (!isObject(etags)) { + throw new TypeError('etags should be of type "Array"') + } + if (!isFunction(cb)) { + throw new TypeError('cb should be of type "function"') + } + + if (!uploadId) { + throw new errors.InvalidArgumentError('uploadId cannot be empty') + } + + var method = 'POST' + var query = `uploadId=${uriEscape(uploadId)}` + + var parts = [] + + etags.forEach((element) => { + parts.push({ + Part: [ + { + PartNumber: element.part, + }, + { + ETag: element.etag, + }, + ], + }) + }) + + var payloadObject = { CompleteMultipartUpload: parts } + var payload = Xml(payloadObject) + + this.makeRequest({ method, bucketName, objectName, query }, payload, [200], '', true, (e, response) => { + if (e) { + return cb(e) + } + var transformer = transformers.getCompleteMultipartTransformer() + pipesetup(response, transformer) + .on('error', (e) => cb(e)) + .on('data', (result) => { + if (result.errCode) { + // Multipart Complete API returns an error XML after a 200 http status + cb(new errors.S3Error(result.errMessage)) + } else { + const completeMultipartResult = { + etag: result.etag, + versionId: getVersionId(response.headers), + } + cb(null, completeMultipartResult) + } + }) + }) + } + + // Get part-info of all parts of an incomplete upload specified by uploadId. 
+ listParts(bucketName, objectName, uploadId, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isString(uploadId)) { + throw new TypeError('uploadId should be of type "string"') + } + if (!uploadId) { + throw new errors.InvalidArgumentError('uploadId cannot be empty') + } + var parts = [] + var listNext = (marker) => { + this.listPartsQuery(bucketName, objectName, uploadId, marker, (e, result) => { + if (e) { + cb(e) + return + } + parts = parts.concat(result.parts) + if (result.isTruncated) { + listNext(result.marker) + return + } + cb(null, parts) + }) + } + listNext(0) + } + + // Called by listParts to fetch a batch of part-info + listPartsQuery(bucketName, objectName, uploadId, marker, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isString(uploadId)) { + throw new TypeError('uploadId should be of type "string"') + } + if (!isNumber(marker)) { + throw new TypeError('marker should be of type "number"') + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + if (!uploadId) { + throw new errors.InvalidArgumentError('uploadId cannot be empty') + } + var query = '' + if (marker && marker !== 0) { + query += `part-number-marker=${marker}&` + } + query += `uploadId=${uriEscape(uploadId)}` + + var method = 'GET' + this.makeRequest({ method, bucketName, objectName, query }, '', [200], '', true, (e, response) => { + if (e) { + return cb(e) + } + var transformer = transformers.getListPartsTransformer() + pipesetup(response, transformer) + .on('error', (e) => cb(e)) + .on('data', (data) => cb(null, data)) + }) + } 
+ + // Called by listIncompleteUploads to fetch a batch of incomplete uploads. + listIncompleteUploadsQuery(bucketName, prefix, keyMarker, uploadIdMarker, delimiter) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isString(prefix)) { + throw new TypeError('prefix should be of type "string"') + } + if (!isString(keyMarker)) { + throw new TypeError('keyMarker should be of type "string"') + } + if (!isString(uploadIdMarker)) { + throw new TypeError('uploadIdMarker should be of type "string"') + } + if (!isString(delimiter)) { + throw new TypeError('delimiter should be of type "string"') + } + var queries = [] + queries.push(`prefix=${uriEscape(prefix)}`) + queries.push(`delimiter=${uriEscape(delimiter)}`) + + if (keyMarker) { + keyMarker = uriEscape(keyMarker) + queries.push(`key-marker=${keyMarker}`) + } + if (uploadIdMarker) { + queries.push(`upload-id-marker=${uploadIdMarker}`) + } + + var maxUploads = 1000 + queries.push(`max-uploads=${maxUploads}`) + queries.sort() + queries.unshift('uploads') + var query = '' + if (queries.length > 0) { + query = `${queries.join('&')}` + } + var method = 'GET' + var transformer = transformers.getListMultipartTransformer() + this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { + if (e) { + return transformer.emit('error', e) + } + pipesetup(response, transformer) + }) + return transformer + } + + // Find uploadId of an incomplete upload. 
+ findUploadId(bucketName, objectName, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isFunction(cb)) { + throw new TypeError('cb should be of type "function"') + } + var latestUpload + var listNext = (keyMarker, uploadIdMarker) => { + this.listIncompleteUploadsQuery(bucketName, objectName, keyMarker, uploadIdMarker, '') + .on('error', (e) => cb(e)) + .on('data', (result) => { + result.uploads.forEach((upload) => { + if (upload.key === objectName) { + if (!latestUpload || upload.initiated.getTime() > latestUpload.initiated.getTime()) { + latestUpload = upload + return + } + } + }) + if (result.isTruncated) { + listNext(result.nextKeyMarker, result.nextUploadIdMarker) + return + } + if (latestUpload) { + return cb(null, latestUpload.uploadId) + } + cb(null, undefined) + }) + } + listNext('', '') + } + + // Returns a function that can be used for uploading objects. + // If multipart === true, it returns function that is used to upload + // a part of the multipart. 
  getUploader(bucketName, objectName, metaData, multipart) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isValidObjectName(objectName)) {
      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
    }
    if (!isBoolean(multipart)) {
      throw new TypeError('multipart should be of type "boolean"')
    }
    if (!isObject(metaData)) {
      throw new TypeError('metadata should be of type "object"')
    }

    // Shared argument validation for both uploader flavors.
    var validate = (stream, length, sha256sum, md5sum, cb) => {
      if (!isReadableStream(stream)) {
        throw new TypeError('stream should be of type "Stream"')
      }
      if (!isNumber(length)) {
        throw new TypeError('length should be of type "number"')
      }
      if (!isString(sha256sum)) {
        throw new TypeError('sha256sum should be of type "string"')
      }
      if (!isString(md5sum)) {
        throw new TypeError('md5sum should be of type "string"')
      }
      if (!isFunction(cb)) {
        throw new TypeError('callback should be of type "function"')
      }
    }
    // Whole-object PUT: no query string.
    var simpleUploader = (...args) => {
      validate(...args)
      var query = ''
      upload(query, ...args)
    }
    // Part upload: requires uploadId and a 1-based partNumber in the query.
    var multipartUploader = (uploadId, partNumber, ...rest) => {
      if (!isString(uploadId)) {
        throw new TypeError('uploadId should be of type "string"')
      }
      if (!isNumber(partNumber)) {
        throw new TypeError('partNumber should be of type "number"')
      }
      if (!uploadId) {
        throw new errors.InvalidArgumentError('Empty uploadId')
      }
      if (!partNumber) {
        throw new errors.InvalidArgumentError('partNumber cannot be 0')
      }
      validate(...rest)
      var query = `partNumber=${partNumber}&uploadId=${uriEscape(uploadId)}`
      upload(query, ...rest)
    }
    var upload = (query, stream, length, sha256sum, md5sum, cb) => {
      var method = 'PUT'
      let headers = { 'Content-Length': length }

      // Object metadata headers are only sent on whole-object uploads;
      // individual part uploads do not carry them.
      if (!multipart) {
        headers = Object.assign({}, metaData, headers)
      }

      // When SHA256 signing is off (non-TLS-dependent setups differ), an MD5
      // header is sent instead for integrity checking.
      if (!this.enableSHA256) {
        headers['Content-MD5'] = md5sum
      }
      this.makeRequestStream(
        { method, bucketName, objectName, query, headers },
        stream,
        sha256sum,
        [200],
        '',
        true,
        (e, response) => {
          if (e) {
            return cb(e)
          }
          const result = {
            etag: sanitizeETag(response.headers.etag),
            versionId: getVersionId(response.headers),
          }
          // Ignore the 'data' event so that the stream closes. (nodejs stream requirement)
          response.on('data', () => {})
          cb(null, result)
        },
      )
    }
    if (multipart) {
      return multipartUploader
    }
    return simpleUploader
  }

  // Set the supplied notification configuration on the bucket
  // (PUT ?notification with the config serialized as XML).
  setBucketNotification(bucketName, config, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isObject(config)) {
      throw new TypeError('notification config should be of type "Object"')
    }
    if (!isFunction(cb)) {
      throw new TypeError('callback should be of type "function"')
    }
    var method = 'PUT'
    var query = 'notification'
    var builder = new xml2js.Builder({
      rootName: 'NotificationConfiguration',
      renderOpts: { pretty: false },
      headless: true,
    })
    var payload = builder.buildObject(config)
    this.makeRequest({ method, bucketName, query }, payload, [200], '', false, cb)
  }

  // Remove all notification configurations on the bucket by uploading an
  // empty NotificationConfig.
  removeAllBucketNotification(bucketName, cb) {
    this.setBucketNotification(bucketName, new NotificationConfig(), cb)
  }

  // Return the list of notification configurations stored
  // in the S3 provider
  getBucketNotification(bucketName, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isFunction(cb)) {
      throw new TypeError('callback should be of type "function"')
    }
    var method = 'GET'
    var query = 'notification'
    this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => {
      if (e) {
        return cb(e)
      }
      var transformer = transformers.getBucketNotificationTransformer()
      var bucketNotification
      pipesetup(response, transformer)
        .on('data', (result) => (bucketNotification = result))
        .on('error', (e) => cb(e))
        .on('end', () => cb(null, bucketNotification))
    })
  }

  // Listens for bucket notifications. Returns an EventEmitter.
  listenBucketNotification(bucketName, prefix, suffix, events) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
    }
    if (!isString(prefix)) {
      throw new TypeError('prefix must be of type string')
    }
    if (!isString(suffix)) {
      throw new TypeError('suffix must be of type string')
    }
    if (!isArray(events)) {
      throw new TypeError('events must be of type Array')
    }
    let listener = new NotificationPoller(this, bucketName, prefix, suffix, events)
    listener.start()

    return listener
  }

  // Fetch the bucket's versioning configuration (GET ?versioning).
  getBucketVersioning(bucketName, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isFunction(cb)) {
      throw new errors.InvalidArgumentError('callback should be of type "function"')
    }
    var method = 'GET'
    var query = 'versioning'

    this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => {
      if (e) {
        return cb(e)
      }

      let versionConfig = Buffer.from('')
      pipesetup(response, transformers.bucketVersioningTransformer())
        .on('data', (data) => {
          versionConfig = data
        })
        .on('error', cb)
        .on('end', () => {
          cb(null, versionConfig)
        })
    })
  }

  // Apply the supplied versioning configuration (PUT ?versioning).
  setBucketVersioning(bucketName, versionConfig, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!Object.keys(versionConfig).length) {
      throw new errors.InvalidArgumentError('versionConfig should be of type "object"')
    }
    if (!isFunction(cb)) {
      throw new TypeError('callback should be of type "function"')
    }

    var method = 'PUT'
    var query = 'versioning'
    var builder = new xml2js.Builder({
      rootName: 'VersioningConfiguration',
      renderOpts: { pretty: false },
      headless: true,
    })
    var payload = builder.buildObject(versionConfig)

    this.makeRequest({ method, bucketName, query }, payload, [200], '', false, cb)
  }

  /** To set Tags on a bucket or object based on the params
   * __Arguments__
   * taggingParams _object_ Which contains the following properties
   *  bucketName _string_,
   *  objectName _string_ (Optional),
   *  tags _object_ of the form {'<tag-key>':'<tag-value>', ...}
   *  putOpts _object_ (Optional) e.g {versionId:"my-object-version-id"},
   *  cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
   */
  setTagging(taggingParams) {
    const { bucketName, objectName, tags, putOpts = {}, cb } = taggingParams
    const method = 'PUT'
    let query = 'tagging'

    if (putOpts && putOpts.versionId) {
      query = `${query}&versionId=${putOpts.versionId}`
    }
    const tagsList = []
    for (const [key, value] of Object.entries(tags)) {
      tagsList.push({ Key: key, Value: value })
    }
    const taggingConfig = {
      Tagging: {
        TagSet: {
          Tag: tagsList,
        },
      },
    }
    const encoder = new TextEncoder()
    const headers = {}
    const builder = new xml2js.Builder({ headless: true, renderOpts: { pretty: false } })
    let payload = builder.buildObject(taggingConfig)
    payload = encoder.encode(payload)
    headers['Content-MD5'] = toMd5(payload)
    const requestOptions = { method, bucketName, query, headers }

    if (objectName) {
      requestOptions['objectName'] = objectName
    }
    // NOTE(review): duplicate assignment — Content-MD5 was already set to the
    // same value above; this line is redundant.
    headers['Content-MD5'] = toMd5(payload)

    this.makeRequest(requestOptions, payload, [200], '', false, cb)
  }

  /** Set Tags on a Bucket
   * __Arguments__
   * bucketName _string_
   * tags _object_ of the form {'<tag-key>':'<tag-value>', ...}
   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
   */
  setBucketTagging(bucketName, tags, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isObject(tags)) {
      throw new errors.InvalidArgumentError('tags should be of type "object"')
    }
    if (Object.keys(tags).length > 10) {
      throw new errors.InvalidArgumentError('maximum tags allowed is 10"')
    }
    if (!isFunction(cb)) {
      throw new errors.InvalidArgumentError('callback should be of type "function"')
    }

    return this.setTagging({ bucketName, tags, cb })
  }

  /** Set Tags on an Object
   * __Arguments__
   * bucketName _string_
   * objectName _string_
   * tags _object_ of the form {'<tag-key>':'<tag-value>', ...}
   * putOpts _object_ (Optional) e.g {versionId:"my-object-version-id"},
   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
   */
  setObjectTagging(bucketName, objectName, tags, putOpts = {}, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isValidObjectName(objectName)) {
      // NOTE(review): throws InvalidBucketNameError for an invalid *object*
      // name — InvalidObjectNameError would be the accurate class.
      throw new errors.InvalidBucketNameError('Invalid object name: ' + objectName)
    }

    // Support the (tags, cb) call shape where putOpts is omitted.
    if (isFunction(putOpts)) {
      cb = putOpts
      putOpts = {}
    }

    if (!isObject(tags)) {
      throw new errors.InvalidArgumentError('tags should be of type "object"')
    }
    if (Object.keys(tags).length > 10) {
      throw new errors.InvalidArgumentError('Maximum tags allowed is 10"')
    }

    if (!isFunction(cb)) {
      throw new TypeError('callback should be of type "function"')
    }
    return this.setTagging({ bucketName, objectName, tags, putOpts, cb })
  }

  /** Remove Tags on an Bucket/Object based on params
   * __Arguments__
   * bucketName _string_
   * objectName _string_ (optional)
   * removeOpts _object_ (Optional) e.g {versionId:"my-object-version-id"},
   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
   */
  removeTagging({ bucketName, objectName, removeOpts, cb }) {
    const method = 'DELETE'
    let query = 'tagging'

    if (removeOpts && Object.keys(removeOpts).length && removeOpts.versionId) {
      query = `${query}&versionId=${removeOpts.versionId}`
    }
    const requestOptions = { method, bucketName, objectName, query }

    // NOTE(review): redundant — objectName is already part of requestOptions
    // via the shorthand above; this re-assignment is a no-op.
    if (objectName) {
      requestOptions['objectName'] = objectName
    }
    this.makeRequest(requestOptions, '', [200, 204], '', true, cb)
  }

  /** Remove Tags associated with a bucket
   * __Arguments__
   * bucketName _string_
   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
   */
  removeBucketTagging(bucketName, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isFunction(cb)) {
      throw new TypeError('callback should be of type "function"')
    }
    return this.removeTagging({ bucketName, cb })
  }

  /** Remove tags associated with an object
   * __Arguments__
   * bucketName _string_
   * objectName _string_
   * removeOpts _object_ (Optional) e.g. {VersionID:"my-object-version-id"}
   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
   */
  removeObjectTagging(bucketName, objectName, removeOpts, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isValidObjectName(objectName)) {
      // NOTE(review): throws InvalidBucketNameError for an invalid *object*
      // name — InvalidObjectNameError would be the accurate class.
      throw new errors.InvalidBucketNameError('Invalid object name: ' + objectName)
    }
    // Support the (objectName, cb) call shape where removeOpts is omitted.
    if (isFunction(removeOpts)) {
      cb = removeOpts
      removeOpts = {}
    }
    if (removeOpts && Object.keys(removeOpts).length && !isObject(removeOpts)) {
      throw new errors.InvalidArgumentError('removeOpts should be of type "object"')
    }

    if (!isFunction(cb)) {
      throw new TypeError('callback should be of type "function"')
    }

    return this.removeTagging({ bucketName, objectName, removeOpts, cb })
  }

  /** Get Tags associated with a Bucket
   * __Arguments__
   * bucketName _string_
   * `cb(error, tags)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
   */
  getBucketTagging(bucketName, cb) {
    // NOTE(review): unlike the sibling tagging methods, bucketName and cb are
    // not validated here — confirm whether that is intentional.
    const method = 'GET'
    const query = 'tagging'
    const requestOptions = { method, bucketName, query }

    this.makeRequest(requestOptions, '', [200], '', true, (e, response) => {
      var transformer = transformers.getTagsTransformer()
      if (e) {
        return cb(e)
      }
      let tagsList
      pipesetup(response, transformer)
        .on('data', (result) => (tagsList = result))
        .on('error', (e) => cb(e))
        .on('end', () => cb(null, tagsList))
    })
  }

  /** Get the tags associated with a bucket OR an object
   * bucketName _string_
   * objectName _string_ (Optional)
   * getOpts _object_ (Optional) e.g {versionId:"my-object-version-id"}
   * `cb(error, tags)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
   */
  getObjectTagging(bucketName, objectName, getOpts = {}, cb = () => false) {
    const method = 'GET'
    let query = 'tagging'

    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isValidObjectName(objectName)) {
      // NOTE(review): throws InvalidBucketNameError for an invalid *object*
      // name — InvalidObjectNameError would be the accurate class.
      throw new errors.InvalidBucketNameError('Invalid object name: ' + objectName)
    }
    // Support the (objectName, cb) call shape where getOpts is omitted.
    if (isFunction(getOpts)) {
      cb = getOpts
      getOpts = {}
    }
    if (!isObject(getOpts)) {
      throw new errors.InvalidArgumentError('getOpts should be of type "object"')
    }
    if (!isFunction(cb)) {
      throw new TypeError('callback should be of type "function"')
    }

    if (getOpts && getOpts.versionId) {
      query = `${query}&versionId=${getOpts.versionId}`
    }
    const requestOptions = { method, bucketName, query }
    if (objectName) {
      requestOptions['objectName'] = objectName
    }

    this.makeRequest(requestOptions, '', [200], '', true, (e, response) => {
      const transformer = transformers.getTagsTransformer()
      if (e) {
        return cb(e)
      }
      let tagsList
      pipesetup(response, transformer)
        .on('data', (result) => (tagsList = result))
        .on('error', (e) => cb(e))
        .on('end', () => cb(null, tagsList))
    })
  }

  /**
   * Apply lifecycle configuration on a bucket.
   * bucketName _string_
   * policyConfig _object_ a valid policy configuration object.
   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
   */
  applyBucketLifecycle(bucketName, policyConfig, cb) {
    const method = 'PUT'
    const query = 'lifecycle'

    const encoder = new TextEncoder()
    const headers = {}
    const builder = new xml2js.Builder({
      rootName: 'LifecycleConfiguration',
      headless: true,
      renderOpts: { pretty: false },
    })
    let payload = builder.buildObject(policyConfig)
    payload = encoder.encode(payload)
    const requestOptions = { method, bucketName, query, headers }
    headers['Content-MD5'] = toMd5(payload)

    this.makeRequest(requestOptions, payload, [200], '', false, cb)
  }

  /** Remove lifecycle configuration of a bucket.
   * bucketName _string_
   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
   */
  removeBucketLifecycle(bucketName, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    const method = 'DELETE'
    const query = 'lifecycle'
    this.makeRequest({ method, bucketName, query }, '', [204], '', false, cb)
  }

  /** Set/Override lifecycle configuration on a bucket. if the configuration is empty, it removes the configuration.
   * bucketName _string_
   * lifeCycleConfig _object_ one of the following values: (null or '') to remove the lifecycle configuration. or a valid lifecycle configuration
   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
   */
  setBucketLifecycle(bucketName, lifeCycleConfig = null, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (_.isEmpty(lifeCycleConfig)) {
      this.removeBucketLifecycle(bucketName, cb)
    } else {
      this.applyBucketLifecycle(bucketName, lifeCycleConfig, cb)
    }
  }

  /** Get lifecycle configuration on a bucket.
   * bucketName _string_
   * `cb(config)` _function_ - callback function with lifecycle configuration as the error argument.
   */
  getBucketLifecycle(bucketName, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    const method = 'GET'
    const query = 'lifecycle'
    const requestOptions = { method, bucketName, query }

    this.makeRequest(requestOptions, '', [200], '', true, (e, response) => {
      const transformer = transformers.lifecycleTransformer()
      if (e) {
        return cb(e)
      }
      let lifecycleConfig
      pipesetup(response, transformer)
        .on('data', (result) => (lifecycleConfig = result))
        .on('error', (e) => cb(e))
        .on('end', () => cb(null, lifecycleConfig))
    })
  }

  // Apply an object-lock configuration (PUT ?object-lock). An empty
  // lockConfigOpts enables object lock without a default retention rule.
  setObjectLockConfig(bucketName, lockConfigOpts = {}, cb) {
    const retentionModes = [RETENTION_MODES.COMPLIANCE, RETENTION_MODES.GOVERNANCE]
    const validUnits = [RETENTION_VALIDITY_UNITS.DAYS, RETENTION_VALIDITY_UNITS.YEARS]

    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }

    if (lockConfigOpts.mode && !retentionModes.includes(lockConfigOpts.mode)) {
      throw new TypeError(`lockConfigOpts.mode should be one of ${retentionModes}`)
    }
    if (lockConfigOpts.unit && !validUnits.includes(lockConfigOpts.unit)) {
      throw new TypeError(`lockConfigOpts.unit should be one of ${validUnits}`)
    }
    if (lockConfigOpts.validity && !isNumber(lockConfigOpts.validity)) {
      throw new TypeError(`lockConfigOpts.validity should be a number`)
    }

    const method = 'PUT'
    const query = 'object-lock'

    let config = {
      ObjectLockEnabled: 'Enabled',
    }
    const configKeys = Object.keys(lockConfigOpts)
    // Check if keys are present and all keys are present.
    if (configKeys.length > 0) {
      if (_.difference(configKeys, ['unit', 'mode', 'validity']).length !== 0) {
        throw new TypeError(
          `lockConfigOpts.mode,lockConfigOpts.unit,lockConfigOpts.validity all the properties should be specified.`,
        )
      } else {
        config.Rule = {
          DefaultRetention: {},
        }
        if (lockConfigOpts.mode) {
          config.Rule.DefaultRetention.Mode = lockConfigOpts.mode
        }
        if (lockConfigOpts.unit === RETENTION_VALIDITY_UNITS.DAYS) {
          config.Rule.DefaultRetention.Days = lockConfigOpts.validity
        } else if (lockConfigOpts.unit === RETENTION_VALIDITY_UNITS.YEARS) {
          config.Rule.DefaultRetention.Years = lockConfigOpts.validity
        }
      }
    }

    const builder = new xml2js.Builder({
      rootName: 'ObjectLockConfiguration',
      renderOpts: { pretty: false },
      headless: true,
    })
    const payload = builder.buildObject(config)

    const headers = {}
    headers['Content-MD5'] = toMd5(payload)

    this.makeRequest({ method, bucketName, query, headers }, payload, [200], '', false, cb)
  }

  // Fetch the bucket's object-lock configuration (GET ?object-lock).
  getObjectLockConfig(bucketName, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isFunction(cb)) {
      throw new errors.InvalidArgumentError('callback should be of type "function"')
    }
    const method = 'GET'
    const query = 'object-lock'

    this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => {
      if (e) {
        return cb(e)
      }

      let objectLockConfig = Buffer.from('')
      pipesetup(response, transformers.objectLockTransformer())
        .on('data', (data) => {
          objectLockConfig = data
        })
        .on('error', cb)
        .on('end', () => {
          cb(null, objectLockConfig)
        })
    })
  }

  // Set a retention configuration on an object (PUT ?retention).
  putObjectRetention(bucketName, objectName, retentionOpts = {}, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isValidObjectName(objectName)) {
      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
    }
    if (!isObject(retentionOpts)) {
      throw new errors.InvalidArgumentError('retentionOpts should be of type "object"')
    } else {
      if (retentionOpts.governanceBypass && !isBoolean(retentionOpts.governanceBypass)) {
        throw new errors.InvalidArgumentError('Invalid value for governanceBypass', retentionOpts.governanceBypass)
      }
      if (
        retentionOpts.mode &&
        ![RETENTION_MODES.COMPLIANCE, RETENTION_MODES.GOVERNANCE].includes(retentionOpts.mode)
      ) {
        throw new errors.InvalidArgumentError('Invalid object retention mode ', retentionOpts.mode)
      }
      if (retentionOpts.retainUntilDate && !isString(retentionOpts.retainUntilDate)) {
        throw new errors.InvalidArgumentError('Invalid value for retainUntilDate', retentionOpts.retainUntilDate)
      }
      if (retentionOpts.versionId && !isString(retentionOpts.versionId)) {
        throw new errors.InvalidArgumentError('Invalid value for versionId', retentionOpts.versionId)
      }
    }
    if (!isFunction(cb)) {
      throw new TypeError('callback should be of type "function"')
    }

    const method = 'PUT'
    let query = 'retention'

    const headers = {}
    if (retentionOpts.governanceBypass) {
      headers['X-Amz-Bypass-Governance-Retention'] = true
    }

    const builder = new xml2js.Builder({ rootName: 'Retention', renderOpts: { pretty: false }, headless: true })
    const params = {}

    if (retentionOpts.mode) {
      params.Mode = retentionOpts.mode
    }
    if (retentionOpts.retainUntilDate) {
      params.RetainUntilDate = retentionOpts.retainUntilDate
    }
    if (retentionOpts.versionId) {
      query += `&versionId=${retentionOpts.versionId}`
    }

    let payload = builder.buildObject(params)

    headers['Content-MD5'] = toMd5(payload)
    this.makeRequest({ method, bucketName, objectName, query, headers }, payload, [200, 204], '', false, cb)
  }

  // Fetch the retention configuration of an object (GET ?retention).
  getObjectRetention(bucketName, objectName, getOpts, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isValidObjectName(objectName)) {
      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
    }
    if (!isObject(getOpts)) {
      // NOTE(review): misleading message — this validates getOpts, not the
      // callback.
      throw new errors.InvalidArgumentError('callback should be of type "object"')
    } else if (getOpts.versionId && !isString(getOpts.versionId)) {
      throw new errors.InvalidArgumentError('VersionID should be of type "string"')
    }
    if (cb && !isFunction(cb)) {
      throw new errors.InvalidArgumentError('callback should be of type "function"')
    }
    const method = 'GET'
    let query = 'retention'
    if (getOpts.versionId) {
      query += `&versionId=${getOpts.versionId}`
    }

    this.makeRequest({ method, bucketName, objectName, query }, '', [200], '', true, (e, response) => {
      if (e) {
        return cb(e)
      }

      let retentionConfig = Buffer.from('')
      pipesetup(response, transformers.objectRetentionTransformer())
        .on('data', (data) => {
          retentionConfig = data
        })
        .on('error', cb)
        .on('end', () => {
          cb(null, retentionConfig)
        })
    })
  }

  // Apply a server-side-encryption configuration (PUT ?encryption). A null or
  // empty config falls back to the default AES256 rule below.
  setBucketEncryption(bucketName, encryptionConfig, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }

    // Support the (bucketName, cb) call shape where the config is omitted.
    if (isFunction(encryptionConfig)) {
      cb = encryptionConfig
      encryptionConfig = null
    }

    if (!_.isEmpty(encryptionConfig) && encryptionConfig.Rule.length > 1) {
      throw new errors.InvalidArgumentError('Invalid Rule length. Only one rule is allowed.: ' + encryptionConfig.Rule)
    }
    if (cb && !isFunction(cb)) {
      throw new TypeError('callback should be of type "function"')
    }

    let encryptionObj = encryptionConfig
    if (_.isEmpty(encryptionConfig)) {
      encryptionObj = {
        // Default MinIO Server Supported Rule
        Rule: [
          {
            ApplyServerSideEncryptionByDefault: {
              SSEAlgorithm: 'AES256',
            },
          },
        ],
      }
    }

    let method = 'PUT'
    let query = 'encryption'
    let builder = new xml2js.Builder({
      rootName: 'ServerSideEncryptionConfiguration',
      renderOpts: { pretty: false },
      headless: true,
    })
    let payload = builder.buildObject(encryptionObj)

    const headers = {}
    headers['Content-MD5'] = toMd5(payload)

    this.makeRequest({ method, bucketName, query, headers }, payload, [200], '', false, cb)
  }

  // Fetch the bucket's server-side-encryption configuration (GET ?encryption).
  getBucketEncryption(bucketName, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isFunction(cb)) {
      throw new errors.InvalidArgumentError('callback should be of type "function"')
    }
    const method = 'GET'
    const query = 'encryption'

    this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => {
      if (e) {
        return cb(e)
      }

      let bucketEncConfig = Buffer.from('')
      pipesetup(response, transformers.bucketEncryptionTransformer())
        .on('data', (data) => {
          bucketEncConfig = data
        })
        .on('error', cb)
        .on('end', () => {
          cb(null, bucketEncConfig)
        })
    })
  }
  // Remove the bucket's server-side-encryption configuration (DELETE ?encryption).
  removeBucketEncryption(bucketName, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isFunction(cb)) {
      throw new errors.InvalidArgumentError('callback should be of type "function"')
    }
    const method = 'DELETE'
    const query = 'encryption'

    this.makeRequest({ method, bucketName, query }, '', [204], '', false, cb)
  }

  // Apply a replication configuration (PUT ?replication). Requires a
  // non-empty role string and at least one rule.
  setBucketReplication(bucketName, replicationConfig = {}, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isObject(replicationConfig)) {
      throw new errors.InvalidArgumentError('replicationConfig should be of type "object"')
    } else {
      if (_.isEmpty(replicationConfig.role)) {
        throw new errors.InvalidArgumentError('Role cannot be empty')
      } else if (replicationConfig.role && !isString(replicationConfig.role)) {
        throw new errors.InvalidArgumentError('Invalid value for role', replicationConfig.role)
      }
      if (_.isEmpty(replicationConfig.rules)) {
        throw new errors.InvalidArgumentError('Minimum one replication rule must be specified')
      }
    }
    if (!isFunction(cb)) {
      throw new TypeError('callback should be of type "function"')
    }

    const method = 'PUT'
    let query = 'replication'
    const headers = {}

    const replicationParamsConfig = {
      ReplicationConfiguration: {
        Role: replicationConfig.role,
        Rule: replicationConfig.rules,
      },
    }

    const builder = new xml2js.Builder({ renderOpts: { pretty: false }, headless: true })

    let payload = builder.buildObject(replicationParamsConfig)

    headers['Content-MD5'] = toMd5(payload)

    this.makeRequest({ method, bucketName, query, headers }, payload, [200], '', false, cb)
  }

  // Fetch the bucket's replication configuration (GET ?replication).
  getBucketReplication(bucketName, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isFunction(cb)) {
      throw new errors.InvalidArgumentError('callback should be of type "function"')
    }
    const method = 'GET'
    const query = 'replication'

    this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => {
      if (e) {
        return cb(e)
      }

      let replicationConfig = Buffer.from('')
      pipesetup(response, transformers.replicationConfigTransformer())
        .on('data', (data) => {
          replicationConfig = data
        })
        .on('error', cb)
        .on('end', () => {
          cb(null, replicationConfig)
        })
    })
  }

removeBucketReplication(bucketName, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + const method = 'DELETE' + const query = 'replication' + this.makeRequest({ method, bucketName, query }, '', [200, 204], '', false, cb) + } + + getObjectLegalHold(bucketName, objectName, getOpts = {}, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + if (isFunction(getOpts)) { + cb = getOpts + getOpts = {} + } + + if (!isObject(getOpts)) { + throw new TypeError('getOpts should be of type "Object"') + } else if (Object.keys(getOpts).length > 0 && getOpts.versionId && !isString(getOpts.versionId)) { + throw new TypeError('versionId should be of type string.:', getOpts.versionId) + } + + if (!isFunction(cb)) { + throw new errors.InvalidArgumentError('callback should be of type "function"') + } + + const method = 'GET' + let query = 'legal-hold' + + if (getOpts.versionId) { + query += `&versionId=${getOpts.versionId}` + } + + this.makeRequest({ method, bucketName, objectName, query }, '', [200], '', true, (e, response) => { + if (e) { + return cb(e) + } + + let legalHoldConfig = Buffer.from('') + pipesetup(response, transformers.objectLegalHoldTransformer()) + .on('data', (data) => { + legalHoldConfig = data + }) + .on('error', cb) + .on('end', () => { + cb(null, legalHoldConfig) + }) + }) + } + + setObjectLegalHold(bucketName, objectName, setOpts = {}, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + const defaultOpts = { + status: LEGAL_HOLD_STATUS.ENABLED, + } + if (isFunction(setOpts)) { + cb 
= setOpts + setOpts = defaultOpts + } + + if (!isObject(setOpts)) { + throw new TypeError('setOpts should be of type "Object"') + } else { + if (![LEGAL_HOLD_STATUS.ENABLED, LEGAL_HOLD_STATUS.DISABLED].includes(setOpts.status)) { + throw new TypeError('Invalid status: ' + setOpts.status) + } + if (setOpts.versionId && !setOpts.versionId.length) { + throw new TypeError('versionId should be of type string.:' + setOpts.versionId) + } + } + + if (!isFunction(cb)) { + throw new errors.InvalidArgumentError('callback should be of type "function"') + } + + if (_.isEmpty(setOpts)) { + setOpts = { + defaultOpts, + } + } + + const method = 'PUT' + let query = 'legal-hold' + + if (setOpts.versionId) { + query += `&versionId=${setOpts.versionId}` + } + + let config = { + Status: setOpts.status, + } + + const builder = new xml2js.Builder({ rootName: 'LegalHold', renderOpts: { pretty: false }, headless: true }) + const payload = builder.buildObject(config) + const headers = {} + headers['Content-MD5'] = toMd5(payload) + + this.makeRequest({ method, bucketName, objectName, query, headers }, payload, [200], '', false, cb) + } + async setCredentialsProvider(credentialsProvider) { + if (!(credentialsProvider instanceof CredentialProvider)) { + throw new Error('Unable to get credentials. Expected instance of CredentialProvider') + } + this.credentialsProvider = credentialsProvider + await this.checkAndRefreshCreds() + } + + async checkAndRefreshCreds() { + if (this.credentialsProvider) { + return await this.fetchCredentials() + } + } + + async fetchCredentials() { + if (this.credentialsProvider) { + const credentialsConf = await this.credentialsProvider.getCredentials() + if (credentialsConf) { + this.accessKey = credentialsConf.getAccessKey() + this.secretKey = credentialsConf.getSecretKey() + this.sessionToken = credentialsConf.getSessionToken() + } else { + throw new Error('Unable to get credentials. 
Expected instance of BaseCredentialsProvider') + } + } else { + throw new Error('Unable to get credentials. Expected instance of BaseCredentialsProvider') + } + } + + /** + * Internal Method to abort a multipart upload request in case of any errors. + * @param bucketName __string__ Bucket Name + * @param objectName __string__ Object Name + * @param uploadId __string__ id of a multipart upload to cancel during compose object sequence. + * @param cb __function__ callback function + */ + abortMultipartUpload(bucketName, objectName, uploadId, cb) { + const method = 'DELETE' + let query = `uploadId=${uploadId}` + + const requestOptions = { method, bucketName, objectName: objectName, query } + this.makeRequest(requestOptions, '', [204], '', false, cb) + } + + /** + * Internal method to upload a part during compose object. + * @param partConfig __object__ contains the following. + * bucketName __string__ + * objectName __string__ + * uploadID __string__ + * partNumber __number__ + * headers __object__ + * @param cb called with null incase of error. + */ + uploadPartCopy(partConfig, cb) { + const { bucketName, objectName, uploadID, partNumber, headers } = partConfig + + const method = 'PUT' + let query = `uploadId=${uploadID}&partNumber=${partNumber}` + const requestOptions = { method, bucketName, objectName: objectName, query, headers } + return this.makeRequest(requestOptions, '', [200], '', true, (e, response) => { + let partCopyResult = Buffer.from('') + if (e) { + return cb(e) + } + pipesetup(response, transformers.uploadPartTransformer()) + .on('data', (data) => { + partCopyResult = data + }) + .on('error', cb) + .on('end', () => { + let uploadPartCopyRes = { + etag: sanitizeETag(partCopyResult.ETag), + key: objectName, + part: partNumber, + } + + cb(null, uploadPartCopyRes) + }) + }) + } + + composeObject(destObjConfig = {}, sourceObjList = [], cb) { + const me = this // many async flows. so store the ref. 
+ const sourceFilesLength = sourceObjList.length + + if (!isArray(sourceObjList)) { + throw new errors.InvalidArgumentError('sourceConfig should an array of CopySourceOptions ') + } + if (!(destObjConfig instanceof CopyDestinationOptions)) { + throw new errors.InvalidArgumentError('destConfig should of type CopyDestinationOptions ') + } + + if (sourceFilesLength < 1 || sourceFilesLength > PART_CONSTRAINTS.MAX_PARTS_COUNT) { + throw new errors.InvalidArgumentError( + `"There must be as least one and up to ${PART_CONSTRAINTS.MAX_PARTS_COUNT} source objects.`, + ) + } + + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + for (let i = 0; i < sourceFilesLength; i++) { + if (!sourceObjList[i].validate()) { + return false + } + } + + if (!destObjConfig.validate()) { + return false + } + + const getStatOptions = (srcConfig) => { + let statOpts = {} + if (!_.isEmpty(srcConfig.VersionID)) { + statOpts = { + versionId: srcConfig.VersionID, + } + } + return statOpts + } + const srcObjectSizes = [] + let totalSize = 0 + let totalParts = 0 + + const sourceObjStats = sourceObjList.map((srcItem) => + me.statObject(srcItem.Bucket, srcItem.Object, getStatOptions(srcItem)), + ) + + return Promise.all(sourceObjStats) + .then((srcObjectInfos) => { + const validatedStats = srcObjectInfos.map((resItemStat, index) => { + const srcConfig = sourceObjList[index] + + let srcCopySize = resItemStat.size + // Check if a segment is specified, and if so, is the + // segment within object bounds? 
+ if (srcConfig.MatchRange) { + // Since range is specified, + // 0 <= src.srcStart <= src.srcEnd + // so only invalid case to check is: + const srcStart = srcConfig.Start + const srcEnd = srcConfig.End + if (srcEnd >= srcCopySize || srcStart < 0) { + throw new errors.InvalidArgumentError( + `CopySrcOptions ${index} has invalid segment-to-copy [${srcStart}, ${srcEnd}] (size is ${srcCopySize})`, + ) + } + srcCopySize = srcEnd - srcStart + 1 + } + + // Only the last source may be less than `absMinPartSize` + if (srcCopySize < PART_CONSTRAINTS.ABS_MIN_PART_SIZE && index < sourceFilesLength - 1) { + throw new errors.InvalidArgumentError( + `CopySrcOptions ${index} is too small (${srcCopySize}) and it is not the last part.`, + ) + } + + // Is data to copy too large? + totalSize += srcCopySize + if (totalSize > PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE) { + throw new errors.InvalidArgumentError(`Cannot compose an object of size ${totalSize} (> 5TiB)`) + } + + // record source size + srcObjectSizes[index] = srcCopySize + + // calculate parts needed for current source + totalParts += partsRequired(srcCopySize) + // Do we need more parts than we are allowed? + if (totalParts > PART_CONSTRAINTS.MAX_PARTS_COUNT) { + throw new errors.InvalidArgumentError( + `Your proposed compose object requires more than ${PART_CONSTRAINTS.MAX_PARTS_COUNT} parts`, + ) + } + + return resItemStat + }) + + if ((totalParts === 1 && totalSize <= PART_CONSTRAINTS.MAX_PART_SIZE) || totalSize === 0) { + return this.copyObject(sourceObjList[0], destObjConfig, cb) // use copyObjectV2 + } + + // preserve etag to avoid modification of object while copying. 
+ for (let i = 0; i < sourceFilesLength; i++) { + sourceObjList[i].MatchETag = validatedStats[i].etag + } + + const splitPartSizeList = validatedStats.map((resItemStat, idx) => { + const calSize = calculateEvenSplits(srcObjectSizes[idx], sourceObjList[idx]) + return calSize + }) + + function getUploadPartConfigList(uploadId) { + const uploadPartConfigList = [] + + splitPartSizeList.forEach((splitSize, splitIndex) => { + const { startIndex: startIdx, endIndex: endIdx, objInfo: objConfig } = splitSize + + let partIndex = splitIndex + 1 // part index starts from 1. + const totalUploads = Array.from(startIdx) + + const headers = sourceObjList[splitIndex].getHeaders() + + totalUploads.forEach((splitStart, upldCtrIdx) => { + let splitEnd = endIdx[upldCtrIdx] + + const sourceObj = `${objConfig.Bucket}/${objConfig.Object}` + headers['x-amz-copy-source'] = `${sourceObj}` + headers['x-amz-copy-source-range'] = `bytes=${splitStart}-${splitEnd}` + + const uploadPartConfig = { + bucketName: destObjConfig.Bucket, + objectName: destObjConfig.Object, + uploadID: uploadId, + partNumber: partIndex, + headers: headers, + sourceObj: sourceObj, + } + + uploadPartConfigList.push(uploadPartConfig) + }) + }) + + return uploadPartConfigList + } + + const performUploadParts = (uploadId) => { + const uploadList = getUploadPartConfigList(uploadId) + + async.map(uploadList, me.uploadPartCopy.bind(me), (err, res) => { + if (err) { + return this.abortMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId, cb) + } + const partsDone = res.map((partCopy) => ({ etag: partCopy.etag, part: partCopy.part })) + return me.completeMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId, partsDone, cb) + }) + } + + const newUploadHeaders = destObjConfig.getHeaders() + + me.initiateNewMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, newUploadHeaders, (err, uploadId) => { + if (err) { + return cb(err, null) + } + performUploadParts(uploadId) + }) + }) + .catch((error) => 
{ + cb(error, null) + }) + } + selectObjectContent(bucketName, objectName, selectOpts = {}, cb) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!_.isEmpty(selectOpts)) { + if (!isString(selectOpts.expression)) { + throw new TypeError('sqlExpression should be of type "string"') + } + if (!_.isEmpty(selectOpts.inputSerialization)) { + if (!isObject(selectOpts.inputSerialization)) { + throw new TypeError('inputSerialization should be of type "object"') + } + } else { + throw new TypeError('inputSerialization is required') + } + if (!_.isEmpty(selectOpts.outputSerialization)) { + if (!isObject(selectOpts.outputSerialization)) { + throw new TypeError('outputSerialization should be of type "object"') + } + } else { + throw new TypeError('outputSerialization is required') + } + } else { + throw new TypeError('valid select configuration is required') + } + + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + const method = 'POST' + let query = `select` + query += '&select-type=2' + + const config = [ + { + Expression: selectOpts.expression, + }, + { + ExpressionType: selectOpts.expressionType || 'SQL', + }, + { + InputSerialization: [selectOpts.inputSerialization], + }, + { + OutputSerialization: [selectOpts.outputSerialization], + }, + ] + + // Optional + if (selectOpts.requestProgress) { + config.push({ RequestProgress: selectOpts.requestProgress }) + } + // Optional + if (selectOpts.scanRange) { + config.push({ ScanRange: selectOpts.scanRange }) + } + + const builder = new xml2js.Builder({ + rootName: 'SelectObjectContentRequest', + renderOpts: { pretty: false }, + headless: true, + }) + const payload = builder.buildObject(config) + + this.makeRequest({ method, bucketName, objectName, query }, payload, [200], '', true, (e, 
response) => { + if (e) { + return cb(e) + } + + let selectResult + pipesetup(response, transformers.selectObjectContentTransformer()) + .on('data', (data) => { + selectResult = parseSelectObjectContentResponse(data) + }) + .on('error', cb) + .on('end', () => { + cb(null, selectResult) + }) + }) + } + + get extensions() { + if (!this.clientExtensions) { + this.clientExtensions = new extensions(this) + } + return this.clientExtensions + } +} + +// Promisify various public-facing APIs on the Client module. +Client.prototype.makeBucket = promisify(Client.prototype.makeBucket) +Client.prototype.listBuckets = promisify(Client.prototype.listBuckets) +Client.prototype.bucketExists = promisify(Client.prototype.bucketExists) +Client.prototype.removeBucket = promisify(Client.prototype.removeBucket) + +Client.prototype.getObject = promisify(Client.prototype.getObject) +Client.prototype.getPartialObject = promisify(Client.prototype.getPartialObject) +Client.prototype.fGetObject = promisify(Client.prototype.fGetObject) +Client.prototype.putObject = promisify(Client.prototype.putObject) +Client.prototype.fPutObject = promisify(Client.prototype.fPutObject) +Client.prototype.copyObject = promisify(Client.prototype.copyObject) +Client.prototype.statObject = promisify(Client.prototype.statObject) +Client.prototype.removeObject = promisify(Client.prototype.removeObject) +Client.prototype.removeObjects = promisify(Client.prototype.removeObjects) + +Client.prototype.presignedUrl = promisify(Client.prototype.presignedUrl) +Client.prototype.presignedGetObject = promisify(Client.prototype.presignedGetObject) +Client.prototype.presignedPutObject = promisify(Client.prototype.presignedPutObject) +Client.prototype.presignedPostPolicy = promisify(Client.prototype.presignedPostPolicy) +Client.prototype.getBucketNotification = promisify(Client.prototype.getBucketNotification) +Client.prototype.setBucketNotification = promisify(Client.prototype.setBucketNotification) 
+Client.prototype.removeAllBucketNotification = promisify(Client.prototype.removeAllBucketNotification) +Client.prototype.getBucketPolicy = promisify(Client.prototype.getBucketPolicy) +Client.prototype.setBucketPolicy = promisify(Client.prototype.setBucketPolicy) +Client.prototype.removeIncompleteUpload = promisify(Client.prototype.removeIncompleteUpload) +Client.prototype.getBucketVersioning = promisify(Client.prototype.getBucketVersioning) +Client.prototype.setBucketVersioning = promisify(Client.prototype.setBucketVersioning) +Client.prototype.setBucketTagging = promisify(Client.prototype.setBucketTagging) +Client.prototype.removeBucketTagging = promisify(Client.prototype.removeBucketTagging) +Client.prototype.getBucketTagging = promisify(Client.prototype.getBucketTagging) +Client.prototype.setObjectTagging = promisify(Client.prototype.setObjectTagging) +Client.prototype.removeObjectTagging = promisify(Client.prototype.removeObjectTagging) +Client.prototype.getObjectTagging = promisify(Client.prototype.getObjectTagging) +Client.prototype.setBucketLifecycle = promisify(Client.prototype.setBucketLifecycle) +Client.prototype.getBucketLifecycle = promisify(Client.prototype.getBucketLifecycle) +Client.prototype.removeBucketLifecycle = promisify(Client.prototype.removeBucketLifecycle) +Client.prototype.setObjectLockConfig = promisify(Client.prototype.setObjectLockConfig) +Client.prototype.getObjectLockConfig = promisify(Client.prototype.getObjectLockConfig) +Client.prototype.putObjectRetention = promisify(Client.prototype.putObjectRetention) +Client.prototype.getObjectRetention = promisify(Client.prototype.getObjectRetention) +Client.prototype.setBucketEncryption = promisify(Client.prototype.setBucketEncryption) +Client.prototype.getBucketEncryption = promisify(Client.prototype.getBucketEncryption) +Client.prototype.removeBucketEncryption = promisify(Client.prototype.removeBucketEncryption) +Client.prototype.setBucketReplication = 
promisify(Client.prototype.setBucketReplication) +Client.prototype.getBucketReplication = promisify(Client.prototype.getBucketReplication) +Client.prototype.removeBucketReplication = promisify(Client.prototype.removeBucketReplication) +Client.prototype.setObjectLegalHold = promisify(Client.prototype.setObjectLegalHold) +Client.prototype.getObjectLegalHold = promisify(Client.prototype.getObjectLegalHold) +Client.prototype.composeObject = promisify(Client.prototype.composeObject) +Client.prototype.selectObjectContent = promisify(Client.prototype.selectObjectContent) + +export class CopyConditions { + constructor() { + this.modified = '' + this.unmodified = '' + this.matchETag = '' + this.matchETagExcept = '' + } + + setModified(date) { + if (!(date instanceof Date)) { + throw new TypeError('date must be of type Date') + } + + this.modified = date.toUTCString() + } + + setUnmodified(date) { + if (!(date instanceof Date)) { + throw new TypeError('date must be of type Date') + } + + this.unmodified = date.toUTCString() + } + + setMatchETag(etag) { + this.matchETag = etag + } + + setMatchETagExcept(etag) { + this.matchETagExcept = etag + } +} + +// Build PostPolicy object that can be signed by presignedPostPolicy +export class PostPolicy { + constructor() { + this.policy = { + conditions: [], + } + this.formData = {} + } + + // set expiration date + setExpires(date) { + if (!date) { + throw new errors.InvalidDateError('Invalid date : cannot be null') + } + this.policy.expiration = date.toISOString() + } + + // set object name + setKey(objectName) { + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name : ${objectName}`) + } + this.policy.conditions.push(['eq', '$key', objectName]) + this.formData.key = objectName + } + + // set object name prefix, i.e policy allows any keys with this prefix + setKeyStartsWith(prefix) { + if (!isValidPrefix(prefix)) { + throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) + } + 
this.policy.conditions.push(['starts-with', '$key', prefix]) + this.formData.key = prefix + } + + // set bucket name + setBucket(bucketName) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name : ${bucketName}`) + } + this.policy.conditions.push(['eq', '$bucket', bucketName]) + this.formData.bucket = bucketName + } + + // set Content-Type + setContentType(type) { + if (!type) { + throw new Error('content-type cannot be null') + } + this.policy.conditions.push(['eq', '$Content-Type', type]) + this.formData['Content-Type'] = type + } + + // set Content-Type prefix, i.e image/ allows any image + setContentTypeStartsWith(prefix) { + if (!prefix) { + throw new Error('content-type cannot be null') + } + this.policy.conditions.push(['starts-with', '$Content-Type', prefix]) + this.formData['Content-Type'] = prefix + } + + // set Content-Disposition + setContentDisposition(value) { + if (!value) { + throw new Error('content-disposition cannot be null') + } + this.policy.conditions.push(['eq', '$Content-Disposition', value]) + this.formData['Content-Disposition'] = value + } + + // set minimum/maximum length of what Content-Length can be. 
+ setContentLengthRange(min, max) { + if (min > max) { + throw new Error('min cannot be more than max') + } + if (min < 0) { + throw new Error('min should be > 0') + } + if (max < 0) { + throw new Error('max should be > 0') + } + this.policy.conditions.push(['content-length-range', min, max]) + } + + // set user defined metadata + setUserMetaData(metaData) { + if (!isObject(metaData)) { + throw new TypeError('metadata should be of type "object"') + } + Object.entries(metaData).forEach(([key, value]) => { + const amzMetaDataKey = `x-amz-meta-${key}` + this.policy.conditions.push(['eq', `$${amzMetaDataKey}`, value]) + this.formData[amzMetaDataKey] = value + }) + } +} diff --git a/src/notification.js b/src/notification.js new file mode 100644 index 00000000..5fe14541 --- /dev/null +++ b/src/notification.js @@ -0,0 +1,200 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2016 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { EventEmitter } from 'node:events' + +import { DEFAULT_REGION, pipesetup, uriEscape } from './helpers.js' +import * as transformers from './transformers.js' + +// Notification config - array of target configs. +// Target configs can be +// 1. Topic (simple notification service) +// 2. Queue (simple queue service) +// 3. 
CloudFront (lambda function) +export class NotificationConfig { + add(target) { + let instance = '' + if (target instanceof TopicConfig) { + instance = 'TopicConfiguration' + } + if (target instanceof QueueConfig) { + instance = 'QueueConfiguration' + } + if (target instanceof CloudFunctionConfig) { + instance = 'CloudFunctionConfiguration' + } + if (!this[instance]) { + this[instance] = [] + } + this[instance].push(target) + } +} + +// Base class for three supported configs. +class TargetConfig { + setId(id) { + this.Id = id + } + addEvent(newevent) { + if (!this.Event) { + this.Event = [] + } + this.Event.push(newevent) + } + addFilterSuffix(suffix) { + if (!this.Filter) { + this.Filter = { S3Key: { FilterRule: [] } } + } + this.Filter.S3Key.FilterRule.push({ Name: 'suffix', Value: suffix }) + } + addFilterPrefix(prefix) { + if (!this.Filter) { + this.Filter = { S3Key: { FilterRule: [] } } + } + this.Filter.S3Key.FilterRule.push({ Name: 'prefix', Value: prefix }) + } +} + +// 1. Topic (simple notification service) +export class TopicConfig extends TargetConfig { + constructor(arn) { + super() + this.Topic = arn + } +} + +// 2. Queue (simple queue service) +export class QueueConfig extends TargetConfig { + constructor(arn) { + super() + this.Queue = arn + } +} + +// 3. 
CloudFront (lambda function) +export class CloudFunctionConfig extends TargetConfig { + constructor(arn) { + super() + this.CloudFunction = arn + } +} + +export const buildARN = (partition, service, region, accountId, resource) => { + return 'arn:' + partition + ':' + service + ':' + region + ':' + accountId + ':' + resource +} + +export const ObjectCreatedAll = 's3:ObjectCreated:*' +export const ObjectCreatedPut = 's3:ObjectCreated:Put' +export const ObjectCreatedPost = 's3:ObjectCreated:Post' +export const ObjectCreatedCopy = 's3:ObjectCreated:Copy' +export const ObjectCreatedCompleteMultipartUpload = 's3:ObjectCreated:CompleteMultipartUpload' +export const ObjectRemovedAll = 's3:ObjectRemoved:*' +export const ObjectRemovedDelete = 's3:ObjectRemoved:Delete' +export const ObjectRemovedDeleteMarkerCreated = 's3:ObjectRemoved:DeleteMarkerCreated' +export const ObjectReducedRedundancyLostObject = 's3:ReducedRedundancyLostObject' + +// Poll for notifications, used in #listenBucketNotification. +// Listening constitutes repeatedly requesting s3 whether or not any +// changes have occurred. +export class NotificationPoller extends EventEmitter { + constructor(client, bucketName, prefix, suffix, events) { + super() + + this.client = client + this.bucketName = bucketName + this.prefix = prefix + this.suffix = suffix + this.events = events + + this.ending = false + } + + // Starts the polling. + start() { + this.ending = false + + process.nextTick(() => { + this.checkForChanges() + }) + } + + // Stops the polling. + stop() { + this.ending = true + } + + checkForChanges() { + // Don't continue if we're looping again but are cancelled. 
+ if (this.ending) { + return + } + + let method = 'GET' + var queries = [] + if (this.prefix) { + var prefix = uriEscape(this.prefix) + queries.push(`prefix=${prefix}`) + } + if (this.suffix) { + var suffix = uriEscape(this.suffix) + queries.push(`suffix=${suffix}`) + } + if (this.events) { + this.events.forEach((s3event) => queries.push('events=' + uriEscape(s3event))) + } + queries.sort() + + var query = '' + if (queries.length > 0) { + query = `${queries.join('&')}` + } + const region = this.client.region || DEFAULT_REGION + this.client.makeRequest({ method, bucketName: this.bucketName, query }, '', [200], region, true, (e, response) => { + if (e) { + return this.emit('error', e) + } + + let transformer = transformers.getNotificationTransformer() + pipesetup(response, transformer) + .on('data', (result) => { + // Data is flushed periodically (every 5 seconds), so we should + // handle it after flushing from the JSON parser. + let records = result.Records + // If null (= no records), change to an empty array. + if (!records) { + records = [] + } + + // Iterate over the notifications and emit them individually. + records.forEach((record) => { + this.emit('notification', record) + }) + + // If we're done, stop. + if (this.ending) { + response.destroy() + } + }) + .on('error', (e) => this.emit('error', e)) + .on('end', () => { + // Do it again, if we haven't cancelled yet. + process.nextTick(() => { + this.checkForChanges() + }) + }) + }) + } +} diff --git a/src/object-uploader.js b/src/object-uploader.js new file mode 100644 index 00000000..6ac099c9 --- /dev/null +++ b/src/object-uploader.js @@ -0,0 +1,287 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2016 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import * as Crypto from 'node:crypto' +import { Transform } from 'node:stream' + +import * as querystring from 'query-string' + +import { getVersionId, sanitizeETag } from './helpers.js' + +// We extend Transform because Writable does not implement ._flush(). +export class ObjectUploader extends Transform { + constructor(client, bucketName, objectName, partSize, metaData, callback) { + super() + this.emptyStream = true + this.client = client + this.bucketName = bucketName + this.objectName = objectName + // The size of each multipart, chunked by BlockStream2. + this.partSize = partSize + // This is the metadata for the object. + this.metaData = metaData + + // Call like: callback(error, {etag, versionId}). + this.callback = callback + + // We need to keep track of what number chunk/part we're on. This increments + // each time _write() is called. Starts with 1, not 0. + this.partNumber = 1 + + // A list of the previously uploaded chunks, for resuming a file upload. This + // will be null if we aren't resuming an upload. + this.oldParts = null + + // Keep track of the etags for aggregating the chunks together later. Each + // etag represents a single chunk of the file. + this.etags = [] + + // This is for the multipart upload request — if null, we're either not initiated + // yet or we're flushing in one packet. + this.id = null + + // Handle errors. 
+ this.on('error', (err) => { + callback(err) + }) + } + + _transform(chunk, encoding, callback) { + this.emptyStream = false + let method = 'PUT' + let headers = { 'Content-Length': chunk.length } + let md5digest = '' + + // Calculate and set Content-MD5 header if SHA256 is not set. + // This will happen only when there is a secure connection to the s3 server. + if (!this.client.enableSHA256) { + md5digest = Crypto.createHash('md5').update(chunk).digest() + headers['Content-MD5'] = md5digest.toString('base64') + } + // We can flush the object in one packet if it fits in one chunk. This is true + // if the chunk size is smaller than the part size, signifying the end of the + // stream. + if (this.partNumber == 1 && chunk.length < this.partSize) { + // PUT the chunk in a single request — use an empty query. + let options = { + method, + // Set user metadata as this is not a multipart upload + headers: Object.assign({}, this.metaData, headers), + query: '', + bucketName: this.bucketName, + objectName: this.objectName, + } + + this.client.makeRequest(options, chunk, [200], '', true, (err, response) => { + if (err) { + return callback(err) + } + let result = { + etag: sanitizeETag(response.headers.etag), + versionId: getVersionId(response.headers), + } + // Ignore the 'data' event so that the stream closes. (nodejs stream requirement) + response.on('data', () => {}) + + // Give the etag back, we're done! + + process.nextTick(() => { + this.callback(null, result) + }) + + // Because we're sure the stream has ended, allow it to flush and end. + callback() + }) + + return + } + + // If we aren't flushing in one packet, we need to initiate the multipart upload, + // if it hasn't already been done. The write will be buffered until the upload has been + // initiated. + if (this.id === null) { + this.once('ready', () => { + this._transform(chunk, encoding, callback) + }) + + // Check for an incomplete previous upload. 
+ this.client.findUploadId(this.bucketName, this.objectName, (err, id) => { + if (err) { + return this.emit('error', err) + } + + // If no upload ID exists, initiate a new one. + if (!id) { + this.client.initiateNewMultipartUpload(this.bucketName, this.objectName, this.metaData, (err, id) => { + if (err) { + return callback(err) + } + + this.id = id + + // We are now ready to accept new chunks — this will flush the buffered chunk. + this.emit('ready') + }) + + return + } + + this.id = id + + // Retrieve the pre-uploaded parts, if we need to resume the upload. + this.client.listParts(this.bucketName, this.objectName, id, (err, etags) => { + if (err) { + return this.emit('error', err) + } + + // It is possible for no parts to be already uploaded. + if (!etags) { + etags = [] + } + + // oldParts will become an object, allowing oldParts[partNumber].etag + this.oldParts = etags.reduce(function (prev, item) { + if (!prev[item.part]) { + prev[item.part] = item + } + return prev + }, {}) + + this.emit('ready') + }) + }) + + return + } + + // Continue uploading various parts if we have initiated multipart upload. + let partNumber = this.partNumber++ + + // Check to see if we've already uploaded this chunk. If the hash sums match, + // we can skip to the next chunk. + if (this.oldParts) { + let oldPart = this.oldParts[partNumber] + + // Calulcate the md5 hash, if it has not already been calculated. + if (!md5digest) { + md5digest = Crypto.createHash('md5').update(chunk).digest() + } + + if (oldPart && md5digest.toString('hex') === oldPart.etag) { + // The md5 matches, the chunk has already been uploaded. + this.etags.push({ part: partNumber, etag: oldPart.etag }) + + callback() + return + } + } + + // Write the chunk with an uploader. 
+ let query = querystring.stringify({ + partNumber: partNumber, + uploadId: this.id, + }) + + let options = { + method, + query, + headers, + bucketName: this.bucketName, + objectName: this.objectName, + } + + this.client.makeRequest(options, chunk, [200], '', true, (err, response) => { + if (err) { + return callback(err) + } + + // In order to aggregate the parts together, we need to collect the etags. + let etag = response.headers.etag + if (etag) { + etag = etag.replace(/^"/, '').replace(/"$/, '') + } + + this.etags.push({ part: partNumber, etag }) + + // Ignore the 'data' event so that the stream closes. (nodejs stream requirement) + response.on('data', () => {}) + + // We're ready for the next chunk. + callback() + }) + } + + _flush(callback) { + if (this.emptyStream) { + let method = 'PUT' + let headers = Object.assign({}, this.metaData, { 'Content-Length': 0 }) + let options = { + method, + headers, + query: '', + bucketName: this.bucketName, + objectName: this.objectName, + } + + this.client.makeRequest(options, '', [200], '', true, (err, response) => { + if (err) { + return callback(err) + } + + let result = { + etag: sanitizeETag(response.headers.etag), + versionId: getVersionId(response.headers), + } + + // Ignore the 'data' event so that the stream closes. (nodejs stream requirement) + response.on('data', () => {}) + + // Give the etag back, we're done! + process.nextTick(() => { + this.callback(null, result) + }) + + // Because we're sure the stream has ended, allow it to flush and end. + callback() + }) + + return + } + // If it has been uploaded in a single packet, we don't have to do anything. + if (this.id === null) { + return + } + + // This is called when all of the chunks uploaded successfully, thus + // completing the multipart upload. 
+ this.client.completeMultipartUpload(this.bucketName, this.objectName, this.id, this.etags, (err, etag) => { + if (err) { + return callback(err) + } + + // Call our callback on the next tick to allow the streams infrastructure + // to finish what its doing before we continue. + process.nextTick(() => { + this.callback(null, etag) + }) + + callback() + }) + } +} + +// deprecated, keep for backward compatibility. +export default ObjectUploader diff --git a/src/s3-endpoints.js b/src/s3-endpoints.js new file mode 100644 index 00000000..aa6a7921 --- /dev/null +++ b/src/s3-endpoints.js @@ -0,0 +1,50 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015, 2016 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { isString } from './helpers.js' + +// List of currently supported endpoints. 
// Region -> hostname map for AWS S3.
const awsS3Endpoint = {
  'us-east-1': 's3.amazonaws.com',
  'us-east-2': 's3-us-east-2.amazonaws.com',
  'us-west-1': 's3-us-west-1.amazonaws.com',
  'us-west-2': 's3-us-west-2.amazonaws.com',
  'ca-central-1': 's3.ca-central-1.amazonaws.com',
  'eu-west-1': 's3-eu-west-1.amazonaws.com',
  'eu-west-2': 's3-eu-west-2.amazonaws.com',
  'sa-east-1': 's3-sa-east-1.amazonaws.com',
  'eu-central-1': 's3-eu-central-1.amazonaws.com',
  'ap-south-1': 's3-ap-south-1.amazonaws.com',
  'ap-southeast-1': 's3-ap-southeast-1.amazonaws.com',
  'ap-southeast-2': 's3-ap-southeast-2.amazonaws.com',
  'ap-northeast-1': 's3-ap-northeast-1.amazonaws.com',
  'cn-north-1': 's3.cn-north-1.amazonaws.com.cn',
  'ap-east-1': 's3.ap-east-1.amazonaws.com',
  'eu-north-1': 's3.eu-north-1.amazonaws.com',
  // Add new endpoints here.
}

// getS3Endpoint get relevant endpoint for the region.
// Falls back to the default (us-east-1) endpoint for unknown regions.
// Throws TypeError when `region` is not a string.
export function getS3Endpoint(region) {
  if (!isString(region)) {
    throw new TypeError(`Invalid region: ${region}`)
  }
  const endpoint = awsS3Endpoint[region]
  if (endpoint) {
    return endpoint
  }
  return 's3.amazonaws.com'
}

// ===== next file in the original patch: src/signing.js =====
/*
 * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2016 MinIO, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
+ */ + +import * as Crypto from 'node:crypto' + +import _ from 'lodash' + +import * as errors from './errors.ts' +import { getScope, isArray, isNumber, isObject, isString, makeDateLong, makeDateShort, uriEscape } from './helpers.js' + +const signV4Algorithm = 'AWS4-HMAC-SHA256' + +// getCanonicalRequest generate a canonical request of style. +// +// canonicalRequest = +// \n +// \n +// \n +// \n +// \n +// +// +function getCanonicalRequest(method, path, headers, signedHeaders, hashedPayload) { + if (!isString(method)) { + throw new TypeError('method should be of type "string"') + } + if (!isString(path)) { + throw new TypeError('path should be of type "string"') + } + if (!isObject(headers)) { + throw new TypeError('headers should be of type "object"') + } + if (!isArray(signedHeaders)) { + throw new TypeError('signedHeaders should be of type "array"') + } + if (!isString(hashedPayload)) { + throw new TypeError('hashedPayload should be of type "string"') + } + const headersArray = signedHeaders.reduce((acc, i) => { + // Trim spaces from the value (required by V4 spec) + const val = `${headers[i]}`.replace(/ +/g, ' ') + acc.push(`${i.toLowerCase()}:${val}`) + return acc + }, []) + + const requestResource = path.split('?')[0] + let requestQuery = path.split('?')[1] + if (!requestQuery) { + requestQuery = '' + } + + if (requestQuery) { + requestQuery = requestQuery + .split('&') + .sort() + .map((element) => (element.indexOf('=') === -1 ? 
element + '=' : element)) + .join('&') + } + + const canonical = [] + canonical.push(method.toUpperCase()) + canonical.push(requestResource) + canonical.push(requestQuery) + canonical.push(headersArray.join('\n') + '\n') + canonical.push(signedHeaders.join(';').toLowerCase()) + canonical.push(hashedPayload) + return canonical.join('\n') +} + +// generate a credential string +function getCredential(accessKey, region, requestDate, serviceName = 's3') { + if (!isString(accessKey)) { + throw new TypeError('accessKey should be of type "string"') + } + if (!isString(region)) { + throw new TypeError('region should be of type "string"') + } + if (!isObject(requestDate)) { + throw new TypeError('requestDate should be of type "object"') + } + return `${accessKey}/${getScope(region, requestDate, serviceName)}` +} + +// Returns signed headers array - alphabetically sorted +function getSignedHeaders(headers) { + if (!isObject(headers)) { + throw new TypeError('request should be of type "object"') + } + // Excerpts from @lsegal - https://github.com/aws/aws-sdk-js/issues/659#issuecomment-120477258 + // + // User-Agent: + // + // This is ignored from signing because signing this causes problems with generating pre-signed URLs + // (that are executed by other agents) or when customers pass requests through proxies, which may + // modify the user-agent. + // + // Content-Length: + // + // This is ignored from signing because generating a pre-signed URL should not provide a content-length + // constraint, specifically when vending a S3 pre-signed PUT URL. The corollary to this is that when + // sending regular requests (non-pre-signed), the signature contains a checksum of the body, which + // implicitly validates the payload length (since changing the number of bytes would change the checksum) + // and therefore this header is not valuable in the signature. 
+ // + // Content-Type: + // + // Signing this header causes quite a number of problems in browser environments, where browsers + // like to modify and normalize the content-type header in different ways. There is more information + // on this in https://github.com/aws/aws-sdk-js/issues/244. Avoiding this field simplifies logic + // and reduces the possibility of future bugs + // + // Authorization: + // + // Is skipped for obvious reasons + + const ignoredHeaders = ['authorization', 'content-length', 'content-type', 'user-agent'] + return _.map(headers, (v, header) => header) + .filter((header) => ignoredHeaders.indexOf(header) === -1) + .sort() +} + +// returns the key used for calculating signature +function getSigningKey(date, region, secretKey, serviceName = 's3') { + if (!isObject(date)) { + throw new TypeError('date should be of type "object"') + } + if (!isString(region)) { + throw new TypeError('region should be of type "string"') + } + if (!isString(secretKey)) { + throw new TypeError('secretKey should be of type "string"') + } + const dateLine = makeDateShort(date) + let hmac1 = Crypto.createHmac('sha256', 'AWS4' + secretKey) + .update(dateLine) + .digest(), + hmac2 = Crypto.createHmac('sha256', hmac1).update(region).digest(), + hmac3 = Crypto.createHmac('sha256', hmac2).update(serviceName).digest() + return Crypto.createHmac('sha256', hmac3).update('aws4_request').digest() +} + +// returns the string that needs to be signed +function getStringToSign(canonicalRequest, requestDate, region, serviceName = 's3') { + if (!isString(canonicalRequest)) { + throw new TypeError('canonicalRequest should be of type "string"') + } + if (!isObject(requestDate)) { + throw new TypeError('requestDate should be of type "object"') + } + if (!isString(region)) { + throw new TypeError('region should be of type "string"') + } + const hash = Crypto.createHash('sha256').update(canonicalRequest).digest('hex') + const scope = getScope(region, requestDate, serviceName) + const 
stringToSign = [] + stringToSign.push(signV4Algorithm) + stringToSign.push(makeDateLong(requestDate)) + stringToSign.push(scope) + stringToSign.push(hash) + const signString = stringToSign.join('\n') + return signString +} + +// calculate the signature of the POST policy +export function postPresignSignatureV4(region, date, secretKey, policyBase64) { + if (!isString(region)) { + throw new TypeError('region should be of type "string"') + } + if (!isObject(date)) { + throw new TypeError('date should be of type "object"') + } + if (!isString(secretKey)) { + throw new TypeError('secretKey should be of type "string"') + } + if (!isString(policyBase64)) { + throw new TypeError('policyBase64 should be of type "string"') + } + const signingKey = getSigningKey(date, region, secretKey) + return Crypto.createHmac('sha256', signingKey).update(policyBase64).digest('hex').toLowerCase() +} + +// Returns the authorization header +export function signV4(request, accessKey, secretKey, region, requestDate, serviceName = 's3') { + if (!isObject(request)) { + throw new TypeError('request should be of type "object"') + } + if (!isString(accessKey)) { + throw new TypeError('accessKey should be of type "string"') + } + if (!isString(secretKey)) { + throw new TypeError('secretKey should be of type "string"') + } + if (!isString(region)) { + throw new TypeError('region should be of type "string"') + } + + if (!accessKey) { + throw new errors.AccessKeyRequiredError('accessKey is required for signing') + } + if (!secretKey) { + throw new errors.SecretKeyRequiredError('secretKey is required for signing') + } + + const sha256sum = request.headers['x-amz-content-sha256'] + + const signedHeaders = getSignedHeaders(request.headers) + const canonicalRequest = getCanonicalRequest(request.method, request.path, request.headers, signedHeaders, sha256sum) + const serviceIdentifier = serviceName || 's3' + const stringToSign = getStringToSign(canonicalRequest, requestDate, region, serviceIdentifier) + 
const signingKey = getSigningKey(requestDate, region, secretKey, serviceIdentifier) + const credential = getCredential(accessKey, region, requestDate, serviceIdentifier) + const signature = Crypto.createHmac('sha256', signingKey).update(stringToSign).digest('hex').toLowerCase() + + return `${signV4Algorithm} Credential=${credential}, SignedHeaders=${signedHeaders + .join(';') + .toLowerCase()}, Signature=${signature}` +} + +export function signV4ByServiceName(request, accessKey, secretKey, region, requestDate, serviceName = 's3') { + return signV4(request, accessKey, secretKey, region, requestDate, serviceName) +} +// returns a presigned URL string +export function presignSignatureV4(request, accessKey, secretKey, sessionToken, region, requestDate, expires) { + if (!isObject(request)) { + throw new TypeError('request should be of type "object"') + } + if (!isString(accessKey)) { + throw new TypeError('accessKey should be of type "string"') + } + if (!isString(secretKey)) { + throw new TypeError('secretKey should be of type "string"') + } + if (!isString(region)) { + throw new TypeError('region should be of type "string"') + } + + if (!accessKey) { + throw new errors.AccessKeyRequiredError('accessKey is required for presigning') + } + if (!secretKey) { + throw new errors.SecretKeyRequiredError('secretKey is required for presigning') + } + + if (!isNumber(expires)) { + throw new TypeError('expires should be of type "number"') + } + if (expires < 1) { + throw new errors.ExpiresParamError('expires param cannot be less than 1 seconds') + } + if (expires > 604800) { + throw new errors.ExpiresParamError('expires param cannot be greater than 7 days') + } + + const iso8601Date = makeDateLong(requestDate) + const signedHeaders = getSignedHeaders(request.headers) + const credential = getCredential(accessKey, region, requestDate) + const hashedPayload = 'UNSIGNED-PAYLOAD' + + const requestQuery = [] + requestQuery.push(`X-Amz-Algorithm=${signV4Algorithm}`) + 
requestQuery.push(`X-Amz-Credential=${uriEscape(credential)}`) + requestQuery.push(`X-Amz-Date=${iso8601Date}`) + requestQuery.push(`X-Amz-Expires=${expires}`) + requestQuery.push(`X-Amz-SignedHeaders=${uriEscape(signedHeaders.join(';').toLowerCase())}`) + if (sessionToken) { + requestQuery.push(`X-Amz-Security-Token=${uriEscape(sessionToken)}`) + } + + const resource = request.path.split('?')[0] + let query = request.path.split('?')[1] + if (query) { + query = query + '&' + requestQuery.join('&') + } else { + query = requestQuery.join('&') + } + + const path = resource + '?' + query + + const canonicalRequest = getCanonicalRequest(request.method, path, request.headers, signedHeaders, hashedPayload) + + const stringToSign = getStringToSign(canonicalRequest, requestDate, region) + const signingKey = getSigningKey(requestDate, region, secretKey) + const signature = Crypto.createHmac('sha256', signingKey).update(stringToSign).digest('hex').toLowerCase() + const presignedUrl = request.protocol + '//' + request.headers.host + path + `&X-Amz-Signature=${signature}` + return presignedUrl +} diff --git a/src/transformers.js b/src/transformers.js new file mode 100644 index 00000000..4cde9a2a --- /dev/null +++ b/src/transformers.js @@ -0,0 +1,263 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015, 2016 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import * as Crypto from 'node:crypto' + +import JSONParser from 'json-stream' +import _ from 'lodash' +import Through2 from 'through2' + +import * as errors from './errors.ts' +import { isFunction } from './helpers.js' +import * as xmlParsers from './xml-parsers.js' + +// getConcater returns a stream that concatenates the input and emits +// the concatenated output when 'end' has reached. If an optional +// parser function is passed upon reaching the 'end' of the stream, +// `parser(concatenated_data)` will be emitted. +export function getConcater(parser, emitError) { + var objectMode = false + var bufs = [] + + if (parser && !isFunction(parser)) { + throw new TypeError('parser should be of type "function"') + } + + if (parser) { + objectMode = true + } + + return Through2( + { objectMode }, + function (chunk, enc, cb) { + bufs.push(chunk) + cb() + }, + function (cb) { + if (emitError) { + cb(parser(Buffer.concat(bufs).toString())) + // cb(e) would mean we have to emit 'end' by explicitly calling this.push(null) + this.push(null) + return + } + if (bufs.length) { + if (parser) { + this.push(parser(Buffer.concat(bufs).toString())) + } else { + this.push(Buffer.concat(bufs)) + } + } + cb() + }, + ) +} + +// Generates an Error object depending on http statusCode and XML body +export function getErrorTransformer(response) { + var statusCode = response.statusCode + var code, message + if (statusCode === 301) { + code = 'MovedPermanently' + message = 'Moved Permanently' + } else if (statusCode === 307) { + code = 'TemporaryRedirect' + message = 'Are you using the correct endpoint URL?' 
+ } else if (statusCode === 403) { + code = 'AccessDenied' + message = 'Valid and authorized credentials required' + } else if (statusCode === 404) { + code = 'NotFound' + message = 'Not Found' + } else if (statusCode === 405) { + code = 'MethodNotAllowed' + message = 'Method Not Allowed' + } else if (statusCode === 501) { + code = 'MethodNotAllowed' + message = 'Method Not Allowed' + } else { + code = 'UnknownError' + message = `${statusCode}` + } + + var headerInfo = {} + // A value created by S3 compatible server that uniquely identifies + // the request. + headerInfo.amzRequestid = response.headersSent ? response.getHeader('x-amz-request-id') : null + // A special token that helps troubleshoot API replies and issues. + headerInfo.amzId2 = response.headersSent ? response.getHeader('x-amz-id-2') : null + // Region where the bucket is located. This header is returned only + // in HEAD bucket and ListObjects response. + headerInfo.amzBucketRegion = response.headersSent ? response.getHeader('x-amz-bucket-region') : null + + return getConcater((xmlString) => { + let getError = () => { + // Message should be instantiated for each S3Errors. + var e = new errors.S3Error(message) + // S3 Error code. 
+ e.code = code + _.each(headerInfo, (value, key) => { + e[key] = value + }) + return e + } + if (!xmlString) { + return getError() + } + let e + try { + e = xmlParsers.parseError(xmlString, headerInfo) + } catch (ex) { + return getError() + } + return e + }, true) +} + +// A through stream that calculates md5sum and sha256sum +export function getHashSummer(enableSHA256) { + var md5 = Crypto.createHash('md5') + var sha256 = Crypto.createHash('sha256') + + return Through2.obj( + function (chunk, enc, cb) { + if (enableSHA256) { + sha256.update(chunk) + } else { + md5.update(chunk) + } + cb() + }, + function (cb) { + var md5sum = '' + var sha256sum = '' + if (enableSHA256) { + sha256sum = sha256.digest('hex') + } else { + md5sum = md5.digest('base64') + } + var hashData = { md5sum, sha256sum } + this.push(hashData) + this.push(null) + cb() + }, + ) +} + +// Following functions return a stream object that parses XML +// and emits suitable Javascript objects. + +// Parses CopyObject response. +export function getCopyObjectTransformer() { + return getConcater(xmlParsers.parseCopyObject) +} + +// Parses listBuckets response. +export function getListBucketTransformer() { + return getConcater(xmlParsers.parseListBucket) +} + +// Parses listMultipartUploads response. +export function getListMultipartTransformer() { + return getConcater(xmlParsers.parseListMultipart) +} + +// Parses listParts response. +export function getListPartsTransformer() { + return getConcater(xmlParsers.parseListParts) +} + +// Parses initMultipartUpload response. +export function getInitiateMultipartTransformer() { + return getConcater(xmlParsers.parseInitiateMultipart) +} + +// Parses listObjects response. +export function getListObjectsTransformer() { + return getConcater(xmlParsers.parseListObjects) +} + +// Parses listObjects response. +export function getListObjectsV2Transformer() { + return getConcater(xmlParsers.parseListObjectsV2) +} + +// Parses listObjects with metadata response. 
+export function getListObjectsV2WithMetadataTransformer() { + return getConcater(xmlParsers.parseListObjectsV2WithMetadata) +} + +// Parses completeMultipartUpload response. +export function getCompleteMultipartTransformer() { + return getConcater(xmlParsers.parseCompleteMultipart) +} + +// Parses getBucketLocation response. +export function getBucketRegionTransformer() { + return getConcater(xmlParsers.parseBucketRegion) +} + +// Parses GET/SET BucketNotification response +export function getBucketNotificationTransformer() { + return getConcater(xmlParsers.parseBucketNotification) +} + +// Parses a notification. +export function getNotificationTransformer() { + // This will parse and return each object. + return new JSONParser() +} + +export function bucketVersioningTransformer() { + return getConcater(xmlParsers.parseBucketVersioningConfig) +} + +export function getTagsTransformer() { + return getConcater(xmlParsers.parseTagging) +} + +export function lifecycleTransformer() { + return getConcater(xmlParsers.parseLifecycleConfig) +} + +export function objectLockTransformer() { + return getConcater(xmlParsers.parseObjectLockConfig) +} + +export function objectRetentionTransformer() { + return getConcater(xmlParsers.parseObjectRetentionConfig) +} +export function bucketEncryptionTransformer() { + return getConcater(xmlParsers.parseBucketEncryptionConfig) +} + +export function replicationConfigTransformer() { + return getConcater(xmlParsers.parseReplicationConfig) +} + +export function objectLegalHoldTransformer() { + return getConcater(xmlParsers.parseObjectLegalHoldConfig) +} + +export function uploadPartTransformer() { + return getConcater(xmlParsers.uploadPartParser) +} +export function selectObjectContentTransformer() { + return getConcater() +} + +export function removeObjectsTransformer() { + return getConcater(xmlParsers.removeObjectsParser) +} diff --git a/src/xml-parsers.js b/src/xml-parsers.js new file mode 100644 index 00000000..447ec898 --- /dev/null 
/*
 * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import crc32 from 'buffer-crc32'
import { XMLParser } from 'fast-xml-parser'
import _ from 'lodash'

import * as errors from './errors.ts'
import {
  isObject,
  parseXml,
  readableStream,
  RETENTION_VALIDITY_UNITS,
  sanitizeETag,
  sanitizeObjectKey,
  SelectResults,
  toArray,
} from './helpers.js'

// Parse XML and return information as Javascript types
const fxp = new XMLParser()

// parse error XML response into an S3Error, merging header info
export function parseError(xml, headerInfo) {
  var xmlErr = {}
  var xmlObj = fxp.parse(xml)
  if (xmlObj.Error) {
    xmlErr = xmlObj.Error
  }

  var e = new errors.S3Error()
  _.each(xmlErr, (value, key) => {
    e[key.toLowerCase()] = value
  })

  _.each(headerInfo, (value, key) => {
    e[key] = value
  })
  return e
}

// parse XML response for copy object
export function parseCopyObject(xml) {
  var result = {
    etag: '',
    lastModified: '',
  }

  var xmlobj = parseXml(xml)
  if (!xmlobj.CopyObjectResult) {
    throw new errors.InvalidXMLError('Missing tag: "CopyObjectResult"')
  }
  xmlobj = xmlobj.CopyObjectResult
  if (xmlobj.ETag) {
    result.etag = xmlobj.ETag.replace(/^"/g, '')
      .replace(/"$/g, '')
      .replace(/^&quot;/g, '')
      .replace(/&quot;$/g, '')
      .replace(/^&#34;/g, '')
      .replace(/&#34;$/g, '')
  }
  if (xmlobj.LastModified) {
    result.lastModified = new Date(xmlobj.LastModified)
  }

  return result
}

// parse XML response for listing in-progress multipart uploads
export function parseListMultipart(xml) {
  var result = {
    uploads: [],
    prefixes: [],
    isTruncated: false,
  }

  var xmlobj = parseXml(xml)

  if (!xmlobj.ListMultipartUploadsResult) {
    throw new errors.InvalidXMLError('Missing tag: "ListMultipartUploadsResult"')
  }
  xmlobj = xmlobj.ListMultipartUploadsResult
  if (xmlobj.IsTruncated) {
    result.isTruncated = xmlobj.IsTruncated
  }
  if (xmlobj.NextKeyMarker) {
    result.nextKeyMarker = xmlobj.NextKeyMarker
  }
  if (xmlobj.NextUploadIdMarker) {
    // BUGFIX: was `xmlobj.nextUploadIdMarker` (lowercase), which never exists —
    // the S3 XML tag is PascalCase, as the guard above already assumes.
    result.nextUploadIdMarker = xmlobj.NextUploadIdMarker
  }

  if (xmlobj.CommonPrefixes) {
    toArray(xmlobj.CommonPrefixes).forEach((prefix) => {
      result.prefixes.push({ prefix: sanitizeObjectKey(toArray(prefix.Prefix)[0]) })
    })
  }

  if (xmlobj.Upload) {
    toArray(xmlobj.Upload).forEach((upload) => {
      var key = upload.Key
      var uploadId = upload.UploadId
      var initiator = { id: upload.Initiator.ID, displayName: upload.Initiator.DisplayName }
      var owner = { id: upload.Owner.ID, displayName: upload.Owner.DisplayName }
      var storageClass = upload.StorageClass
      var initiated = new Date(upload.Initiated)
      result.uploads.push({ key, uploadId, initiator, owner, storageClass, initiated })
    })
  }
  return result
}

// parse XML response to list all the owned buckets
export function parseListBucket(xml) {
  var result = []
  var xmlobj = parseXml(xml)

  if (!xmlobj.ListAllMyBucketsResult) {
    throw new errors.InvalidXMLError('Missing tag: "ListAllMyBucketsResult"')
  }
  xmlobj = xmlobj.ListAllMyBucketsResult

  if (xmlobj.Buckets) {
    if (xmlobj.Buckets.Bucket) {
      toArray(xmlobj.Buckets.Bucket).forEach((bucket) => {
        var name = bucket.Name
        var creationDate = new Date(bucket.CreationDate)
        result.push({ name, creationDate })
      })
    }
  }
  return result
}

// parse XML response for bucket notification
export function parseBucketNotification(xml) {
  var result = {
    TopicConfiguration: [],
    QueueConfiguration: [],
    CloudFunctionConfiguration: [],
  }
  // Parse the events list
  var genEvents = function (events) {
    var result = []
    if (events) {
      toArray(events).forEach((s3event) => {
        result.push(s3event)
      })
    }
    return result
  }
  // Parse all filter rules
  var genFilterRules = function (filters) {
    var result = []
    if (filters) {
      filters = toArray(filters)
      if (filters[0].S3Key) {
        filters[0].S3Key = toArray(filters[0].S3Key)
        if (filters[0].S3Key[0].FilterRule) {
          toArray(filters[0].S3Key[0].FilterRule).forEach((rule) => {
            var Name = toArray(rule.Name)[0]
            var Value = toArray(rule.Value)[0]
            result.push({ Name, Value })
          })
        }
      }
    }
    return result
  }

  var xmlobj = parseXml(xml)
  xmlobj = xmlobj.NotificationConfiguration

  // Parse all topic configurations in the xml
  if (xmlobj.TopicConfiguration) {
    toArray(xmlobj.TopicConfiguration).forEach((config) => {
      var Id = toArray(config.Id)[0]
      var Topic = toArray(config.Topic)[0]
      var Event = genEvents(config.Event)
      var Filter = genFilterRules(config.Filter)
      result.TopicConfiguration.push({ Id, Topic, Event, Filter })
    })
  }
  // Parse all queue configurations in the xml
  if (xmlobj.QueueConfiguration) {
    toArray(xmlobj.QueueConfiguration).forEach((config) => {
      var Id = toArray(config.Id)[0]
      var Queue = toArray(config.Queue)[0]
      var Event = genEvents(config.Event)
      var Filter = genFilterRules(config.Filter)
      result.QueueConfiguration.push({ Id, Queue, Event, Filter })
    })
  }
  // Parse all cloud-function configurations in the xml
  if (xmlobj.CloudFunctionConfiguration) {
    toArray(xmlobj.CloudFunctionConfiguration).forEach((config) => {
      var Id = toArray(config.Id)[0]
      var CloudFunction = toArray(config.CloudFunction)[0]
      var Event = genEvents(config.Event)
      var Filter = genFilterRules(config.Filter)
      result.CloudFunctionConfiguration.push({ Id, CloudFunction, Event, Filter })
    })
  }

  return result
}

// parse XML response for bucket region
export function parseBucketRegion(xml) {
  // return region information
  return parseXml(xml).LocationConstraint
}

// parse XML response for list parts of an in progress multipart upload
export function parseListParts(xml) {
  var xmlobj = parseXml(xml)
  var result = {
    isTruncated: false,
    parts: [],
    marker: undefined,
  }
  if (!xmlobj.ListPartsResult) {
    throw new errors.InvalidXMLError('Missing tag: "ListPartsResult"')
  }
  xmlobj = xmlobj.ListPartsResult
  if (xmlobj.IsTruncated) {
    result.isTruncated = xmlobj.IsTruncated
  }
  if (xmlobj.NextPartNumberMarker) {
    result.marker = +toArray(xmlobj.NextPartNumberMarker)[0]
  }
  if (xmlobj.Part) {
    toArray(xmlobj.Part).forEach((p) => {
      var part = +toArray(p.PartNumber)[0]
      var lastModified = new Date(p.LastModified)
      var etag = p.ETag.replace(/^"/g, '')
        .replace(/"$/g, '')
        .replace(/^&quot;/g, '')
        .replace(/&quot;$/g, '')
        .replace(/^&#34;/g, '')
        .replace(/&#34;$/g, '')
      result.parts.push({ part, lastModified, etag })
    })
  }
  return result
}

// parse XML response when a new multipart upload is initiated
export function parseInitiateMultipart(xml) {
  var xmlobj = parseXml(xml)

  if (!xmlobj.InitiateMultipartUploadResult) {
    throw new errors.InvalidXMLError('Missing tag: "InitiateMultipartUploadResult"')
  }
  xmlobj = xmlobj.InitiateMultipartUploadResult

  if (xmlobj.UploadId) {
    return xmlobj.UploadId
  }
  throw new errors.InvalidXMLError('Missing tag: "UploadId"')
}

// parse XML response when a multipart upload is completed
export function parseCompleteMultipart(xml) {
  var xmlobj = parseXml(xml).CompleteMultipartUploadResult
  if (xmlobj.Location) {
    var location = toArray(xmlobj.Location)[0]
    var bucket = toArray(xmlobj.Bucket)[0]
    var key = xmlobj.Key
    var etag = xmlobj.ETag.replace(/^"/g, '')
      .replace(/"$/g, '')
      .replace(/^&quot;/g, '')
      .replace(/&quot;$/g, '')
      .replace(/^&#34;/g, '')
      .replace(/&#34;$/g, '')

    return { location, bucket, key, etag }
  }
  // Complete Multipart can return XML Error after a 200 OK response
  if (xmlobj.Code && xmlobj.Message) {
    var errCode = toArray(xmlobj.Code)[0]
    var errMessage = toArray(xmlobj.Message)[0]
    return { errCode, errMessage }
  }
}

// Normalize one <Contents>/<Version>/<DeleteMarker> entity into our object-info shape.
const formatObjInfo = (content, opts = {}) => {
  let { Key, LastModified, ETag, Size, VersionId, IsLatest } = content

  if (!isObject(opts)) {
    opts = {}
  }

  const name = sanitizeObjectKey(toArray(Key)[0])
  const lastModified = new Date(toArray(LastModified)[0])
  const etag = sanitizeETag(toArray(ETag)[0])

  return {
    name,
    lastModified,
    etag,
    size: Size,
    versionId: VersionId,
    isLatest: IsLatest,
    isDeleteMarker: opts.IsDeleteMarker ? opts.IsDeleteMarker : false,
  }
}

// parse XML response for list objects in a bucket
// (handles both ListBucketResult and versioned ListVersionsResult replies)
export function parseListObjects(xml) {
  var result = {
    objects: [],
    isTruncated: false,
  }
  let isTruncated = false
  let nextMarker, nextVersionKeyMarker
  const xmlobj = parseXml(xml)

  const parseCommonPrefixesEntity = (responseEntity) => {
    if (responseEntity) {
      toArray(responseEntity).forEach((commonPrefix) => {
        result.objects.push({ prefix: sanitizeObjectKey(toArray(commonPrefix.Prefix)[0]), size: 0 })
      })
    }
  }

  const listBucketResult = xmlobj.ListBucketResult
  const listVersionsResult = xmlobj.ListVersionsResult

  if (listBucketResult) {
    if (listBucketResult.IsTruncated) {
      isTruncated = listBucketResult.IsTruncated
    }
    if (listBucketResult.Contents) {
      toArray(listBucketResult.Contents).forEach((content) => {
        const name = sanitizeObjectKey(toArray(content.Key)[0])
        const lastModified = new Date(toArray(content.LastModified)[0])
        const etag = sanitizeETag(toArray(content.ETag)[0])
        const size = content.Size
        result.objects.push({ name, lastModified, etag, size })
      })
    }

    if (listBucketResult.NextMarker) {
      nextMarker = listBucketResult.NextMarker
    }
    parseCommonPrefixesEntity(listBucketResult.CommonPrefixes)
  }

  if (listVersionsResult) {
    if (listVersionsResult.IsTruncated) {
      isTruncated = listVersionsResult.IsTruncated
    }

    if (listVersionsResult.Version) {
      toArray(listVersionsResult.Version).forEach((content) => {
        result.objects.push(formatObjInfo(content))
      })
    }
    if (listVersionsResult.DeleteMarker) {
      toArray(listVersionsResult.DeleteMarker).forEach((content) => {
        result.objects.push(formatObjInfo(content, { IsDeleteMarker: true }))
      })
    }

    if (listVersionsResult.NextKeyMarker) {
      nextVersionKeyMarker = listVersionsResult.NextKeyMarker
    }
    if (listVersionsResult.NextVersionIdMarker) {
      result.versionIdMarker = listVersionsResult.NextVersionIdMarker
    }
    parseCommonPrefixesEntity(listVersionsResult.CommonPrefixes)
  }

  result.isTruncated = isTruncated
  if (isTruncated) {
    result.nextMarker = nextVersionKeyMarker || nextMarker
  }
  return result
}

// parse XML response for list objects v2 in a bucket
export function parseListObjectsV2(xml) {
  var result = {
    objects: [],
    isTruncated: false,
  }
  var xmlobj = parseXml(xml)
  if (!xmlobj.ListBucketResult) {
    throw new errors.InvalidXMLError('Missing tag: "ListBucketResult"')
  }
  xmlobj = xmlobj.ListBucketResult
  if (xmlobj.IsTruncated) {
    result.isTruncated = xmlobj.IsTruncated
  }
  if (xmlobj.NextContinuationToken) {
    result.nextContinuationToken = xmlobj.NextContinuationToken
  }
  if (xmlobj.Contents) {
    toArray(xmlobj.Contents).forEach((content) => {
      var name = sanitizeObjectKey(toArray(content.Key)[0])
      var lastModified = new Date(content.LastModified)
      var etag = sanitizeETag(content.ETag)
      var size = content.Size
      result.objects.push({ name, lastModified, etag, size })
    })
  }
  if (xmlobj.CommonPrefixes) {
    toArray(xmlobj.CommonPrefixes).forEach((commonPrefix) => {
      result.objects.push({ prefix: sanitizeObjectKey(toArray(commonPrefix.Prefix)[0]), size: 0 })
    })
  }
  return result
}

//
parse XML response for list objects v2 with metadata in a bucket +export function parseListObjectsV2WithMetadata(xml) { + var result = { + objects: [], + isTruncated: false, + } + var xmlobj = parseXml(xml) + if (!xmlobj.ListBucketResult) { + throw new errors.InvalidXMLError('Missing tag: "ListBucketResult"') + } + xmlobj = xmlobj.ListBucketResult + if (xmlobj.IsTruncated) { + result.isTruncated = xmlobj.IsTruncated + } + if (xmlobj.NextContinuationToken) { + result.nextContinuationToken = xmlobj.NextContinuationToken + } + + if (xmlobj.Contents) { + toArray(xmlobj.Contents).forEach((content) => { + var name = sanitizeObjectKey(content.Key) + var lastModified = new Date(content.LastModified) + var etag = sanitizeETag(content.ETag) + var size = content.Size + var metadata + if (content.UserMetadata != null) { + metadata = toArray(content.UserMetadata)[0] + } else { + metadata = null + } + result.objects.push({ name, lastModified, etag, size, metadata }) + }) + } + + if (xmlobj.CommonPrefixes) { + toArray(xmlobj.CommonPrefixes).forEach((commonPrefix) => { + result.objects.push({ prefix: sanitizeObjectKey(toArray(commonPrefix.Prefix)[0]), size: 0 }) + }) + } + return result +} + +export function parseBucketVersioningConfig(xml) { + var xmlObj = parseXml(xml) + return xmlObj.VersioningConfiguration +} + +export function parseTagging(xml) { + const xmlObj = parseXml(xml) + let result = [] + if (xmlObj.Tagging && xmlObj.Tagging.TagSet && xmlObj.Tagging.TagSet.Tag) { + const tagResult = xmlObj.Tagging.TagSet.Tag + // if it is a single tag convert into an array so that the return value is always an array. 
+ if (isObject(tagResult)) { + result.push(tagResult) + } else { + result = tagResult + } + } + return result +} + +export function parseLifecycleConfig(xml) { + const xmlObj = parseXml(xml) + return xmlObj.LifecycleConfiguration +} + +export function parseObjectLockConfig(xml) { + const xmlObj = parseXml(xml) + let lockConfigResult = {} + if (xmlObj.ObjectLockConfiguration) { + lockConfigResult = { + objectLockEnabled: xmlObj.ObjectLockConfiguration.ObjectLockEnabled, + } + let retentionResp + if ( + xmlObj.ObjectLockConfiguration && + xmlObj.ObjectLockConfiguration.Rule && + xmlObj.ObjectLockConfiguration.Rule.DefaultRetention + ) { + retentionResp = xmlObj.ObjectLockConfiguration.Rule.DefaultRetention || {} + lockConfigResult.mode = retentionResp.Mode + } + if (retentionResp) { + const isUnitYears = retentionResp.Years + if (isUnitYears) { + lockConfigResult.validity = isUnitYears + lockConfigResult.unit = RETENTION_VALIDITY_UNITS.YEARS + } else { + lockConfigResult.validity = retentionResp.Days + lockConfigResult.unit = RETENTION_VALIDITY_UNITS.DAYS + } + } + return lockConfigResult + } +} + +export function parseObjectRetentionConfig(xml) { + const xmlObj = parseXml(xml) + const retentionConfig = xmlObj.Retention + + return { + mode: retentionConfig.Mode, + retainUntilDate: retentionConfig.RetainUntilDate, + } +} + +export function parseBucketEncryptionConfig(xml) { + let encConfig = parseXml(xml) + return encConfig +} +export function parseReplicationConfig(xml) { + const xmlObj = parseXml(xml) + const replicationConfig = { + ReplicationConfiguration: { + role: xmlObj.ReplicationConfiguration.Role, + rules: toArray(xmlObj.ReplicationConfiguration.Rule), + }, + } + return replicationConfig +} + +export function parseObjectLegalHoldConfig(xml) { + const xmlObj = parseXml(xml) + return xmlObj.LegalHold +} + +export function uploadPartParser(xml) { + const xmlObj = parseXml(xml) + const respEl = xmlObj.CopyPartResult + return respEl +} + +export function 
removeObjectsParser(xml) { + const xmlObj = parseXml(xml) + if (xmlObj.DeleteResult && xmlObj.DeleteResult.Error) { + // return errors as array always. as the response is object in case of single object passed in removeObjects + return toArray(xmlObj.DeleteResult.Error) + } + return [] +} + +export function parseSelectObjectContentResponse(res) { + // extractHeaderType extracts the first half of the header message, the header type. + function extractHeaderType(stream) { + const headerNameLen = Buffer.from(stream.read(1)).readUInt8() + const headerNameWithSeparator = Buffer.from(stream.read(headerNameLen)).toString() + const splitBySeparator = (headerNameWithSeparator || '').split(':') + const headerName = splitBySeparator.length >= 1 ? splitBySeparator[1] : '' + return headerName + } + + function extractHeaderValue(stream) { + const bodyLen = Buffer.from(stream.read(2)).readUInt16BE() + const bodyName = Buffer.from(stream.read(bodyLen)).toString() + return bodyName + } + + const selectResults = new SelectResults({}) // will be returned + + const responseStream = readableStream(res) // convert byte array to a readable responseStream + while (responseStream._readableState.length) { + // Top level responseStream read tracker. + let msgCrcAccumulator // accumulate from start of the message till the message crc start. + + const totalByteLengthBuffer = Buffer.from(responseStream.read(4)) + msgCrcAccumulator = crc32(totalByteLengthBuffer) + + const headerBytesBuffer = Buffer.from(responseStream.read(4)) + msgCrcAccumulator = crc32(headerBytesBuffer, msgCrcAccumulator) + + const calculatedPreludeCrc = msgCrcAccumulator.readInt32BE() // use it to check if any CRC mismatch in header itself. 
+ + const preludeCrcBuffer = Buffer.from(responseStream.read(4)) // read 4 bytes i.e 4+4 =8 + 4 = 12 ( prelude + prelude crc) + msgCrcAccumulator = crc32(preludeCrcBuffer, msgCrcAccumulator) + + const totalMsgLength = totalByteLengthBuffer.readInt32BE() + const headerLength = headerBytesBuffer.readInt32BE() + const preludeCrcByteValue = preludeCrcBuffer.readInt32BE() + + if (preludeCrcByteValue !== calculatedPreludeCrc) { + // Handle Header CRC mismatch Error + throw new Error( + `Header Checksum Mismatch, Prelude CRC of ${preludeCrcByteValue} does not equal expected CRC of ${calculatedPreludeCrc}`, + ) + } + + const headers = {} + if (headerLength > 0) { + const headerBytes = Buffer.from(responseStream.read(headerLength)) + msgCrcAccumulator = crc32(headerBytes, msgCrcAccumulator) + const headerReaderStream = readableStream(headerBytes) + while (headerReaderStream._readableState.length) { + let headerTypeName = extractHeaderType(headerReaderStream) + headerReaderStream.read(1) // just read and ignore it. + headers[headerTypeName] = extractHeaderValue(headerReaderStream) + } + } + + let payloadStream + const payLoadLength = totalMsgLength - headerLength - 16 + if (payLoadLength > 0) { + const payLoadBuffer = Buffer.from(responseStream.read(payLoadLength)) + msgCrcAccumulator = crc32(payLoadBuffer, msgCrcAccumulator) + // read the checksum early and detect any mismatch so we can avoid unnecessary further processing. 
+ const messageCrcByteValue = Buffer.from(responseStream.read(4)).readInt32BE() + const calculatedCrc = msgCrcAccumulator.readInt32BE() + // Handle message CRC Error + if (messageCrcByteValue !== calculatedCrc) { + throw new Error( + `Message Checksum Mismatch, Message CRC of ${messageCrcByteValue} does not equal expected CRC of ${calculatedCrc}`, + ) + } + payloadStream = readableStream(payLoadBuffer) + } + + const messageType = headers['message-type'] + + switch (messageType) { + case 'error': { + const errorMessage = headers['error-code'] + ':"' + headers['error-message'] + '"' + throw new Error(errorMessage) + } + case 'event': { + const contentType = headers['content-type'] + const eventType = headers['event-type'] + + switch (eventType) { + case 'End': { + selectResults.setResponse(res) + return selectResults + } + + case 'Records': { + const readData = payloadStream.read(payLoadLength) + selectResults.setRecords(readData) + break + } + + case 'Progress': + { + switch (contentType) { + case 'text/xml': { + const progressData = payloadStream.read(payLoadLength) + selectResults.setProgress(progressData.toString()) + break + } + default: { + const errorMessage = `Unexpected content-type ${contentType} sent for event-type Progress` + throw new Error(errorMessage) + } + } + } + break + case 'Stats': + { + switch (contentType) { + case 'text/xml': { + const statsData = payloadStream.read(payLoadLength) + selectResults.setStats(statsData.toString()) + break + } + default: { + const errorMessage = `Unexpected content-type ${contentType} sent for event-type Stats` + throw new Error(errorMessage) + } + } + } + break + default: { + // Continuation message: Not sure if it is supported. did not find a reference or any message in response. + // It does not have a payload. 
+ const warningMessage = `Un implemented event detected ${messageType}.` + // eslint-disable-next-line no-console + console.warn(warningMessage) + } + } // eventType End + } // Event End + } // messageType End + } // Top Level Stream End +} diff --git a/tests/functional/functional-tests.js b/tests/functional/functional-tests.js new file mode 100644 index 00000000..d0ab714b --- /dev/null +++ b/tests/functional/functional-tests.js @@ -0,0 +1,4718 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import * as crypto from 'node:crypto' +import * as fs from 'node:fs' +import * as http from 'node:http' +import * as https from 'node:https' +import * as os from 'node:os' +import * as stream from 'node:stream' +import * as url from 'node:url' + +import async from 'async' +import chai from 'chai' +import _ from 'lodash' +import { step } from 'mocha-steps' +import splitFile from 'split-file' +import superagent from 'superagent' +import * as uuid from 'uuid' + +import { AssumeRoleProvider } from '../../src/AssumeRoleProvider.js' +import { + CopyDestinationOptions, + CopySourceOptions, + DEFAULT_REGION, + getVersionId, + isArray, + removeDirAndFiles, +} from '../../src/helpers.js' +import * as minio from '../../src/minio.js' + +const assert = chai.assert + +const isWindowsPlatform = process.platform === 'win32' + +describe('functional tests', function () { + this.timeout(30 * 60 * 1000) + var clientConfigParams = {} + var region_conf_env = process.env['MINIO_REGION'] + + if (process.env['SERVER_ENDPOINT']) { + var res = process.env['SERVER_ENDPOINT'].split(':') + clientConfigParams.endPoint = res[0] + clientConfigParams.port = parseInt(res[1]) + var access_Key_env = process.env['ACCESS_KEY'] + var secret_key_env = process.env['SECRET_KEY'] + + // If the user provides ENABLE_HTTPS, 1 = secure, anything else = unsecure. + // Otherwise default useSSL as true. 
+ var enable_https_env = process.env['ENABLE_HTTPS'] + // Get the credentials from env vars, error out if they don't exist + if (access_Key_env) { + clientConfigParams.accessKey = access_Key_env + } else { + // eslint-disable-next-line no-console + console.error(`Error: ACCESS_KEY Environment variable is not set`) + process.exit(1) + } + if (secret_key_env) { + clientConfigParams.secretKey = secret_key_env + } else { + // eslint-disable-next-line no-console + console.error(`Error: SECRET_KEY Environment variable is not set`) + process.exit(1) + } + clientConfigParams.useSSL = enable_https_env == '1' + } else { + // If credentials aren't given, default to play.min.io. + clientConfigParams.endPoint = 'play.min.io' + clientConfigParams.port = 9000 + clientConfigParams.accessKey = 'Q3AM3UQ867SPQQA43P2F' + clientConfigParams.secretKey = 'zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG' + clientConfigParams.useSSL = true + } + const server_region = region_conf_env || DEFAULT_REGION + + clientConfigParams.region = server_region + // set the partSize to ensure multipart upload chunk size. + // if not set, putObject with stream data and undefined length will use about 500Mb chunkSize (5Tb/10000). + clientConfigParams.partSize = 64 * 1024 * 1024 + + // dataDir is falsy if we need to generate data on the fly. Otherwise, it will be + // a directory with files to read from, i.e. /mint/data. + var dataDir = process.env['MINT_DATA_DIR'] + + var client = new minio.Client(clientConfigParams) + var usEastConfig = clientConfigParams + usEastConfig.region = server_region + var clientUsEastRegion = new minio.Client(usEastConfig) + + var traceStream + // FUNCTIONAL_TEST_TRACE env variable contains the path to which trace + // will be logged. Set it to /dev/stdout log to the stdout. + var trace_func_test_file_path = process.env['FUNCTIONAL_TEST_TRACE'] + if (trace_func_test_file_path) { + // This is necessary for windows. 
+ if (trace_func_test_file_path === 'process.stdout') { + traceStream = process.stdout + } else { + traceStream = fs.createWriteStream(trace_func_test_file_path, { flags: 'a' }) + } + traceStream.write('====================================\n') + client.traceOn(traceStream) + } + + var bucketName = 'minio-js-test-' + uuid.v4() + var objectName = uuid.v4() + + var _1byteObjectName = 'datafile-1-b' + var _1byte = dataDir ? fs.readFileSync(dataDir + '/' + _1byteObjectName) : Buffer.alloc(1, 0) + + var _100kbObjectName = 'datafile-100-kB' + var _100kb = dataDir ? fs.readFileSync(dataDir + '/' + _100kbObjectName) : Buffer.alloc(100 * 1024, 0) + var _100kbObjectNameCopy = _100kbObjectName + '-copy' + + var _100kbObjectBufferName = `${_100kbObjectName}.buffer` + var _MultiPath100kbObjectBufferName = `path/to/${_100kbObjectName}.buffer` + var _100kbmd5 = crypto.createHash('md5').update(_100kb).digest('hex') + var _100kb1kboffsetmd5 = crypto.createHash('md5').update(_100kb.slice(1024)).digest('hex') + + var _65mbObjectName = 'datafile-65-MB' + var _65mb = dataDir ? fs.readFileSync(dataDir + '/' + _65mbObjectName) : Buffer.alloc(65 * 1024 * 1024, 0) + var _65mbmd5 = crypto.createHash('md5').update(_65mb).digest('hex') + var _65mbObjectNameCopy = _65mbObjectName + '-copy' + + var _5mbObjectName = 'datafile-5-MB' + var _5mb = dataDir ? fs.readFileSync(dataDir + '/' + _5mbObjectName) : Buffer.alloc(5 * 1024 * 1024, 0) + var _5mbmd5 = crypto.createHash('md5').update(_5mb).digest('hex') + + // create new http agent to check requests release sockets + var httpAgent = (clientConfigParams.useSSL ? 
https : http).Agent({ keepAlive: true }) + client.setRequestOptions({ agent: httpAgent }) + var metaData = { + 'Content-Type': 'text/html', + 'Content-Language': 'en', + 'X-Amz-Meta-Testing': 1234, + randomstuff: 5678, + } + + var tmpDir = os.tmpdir() + + function readableStream(data) { + var s = new stream.Readable() + s._read = () => {} + s.push(data) + s.push(null) + return s + } + + before((done) => client.makeBucket(bucketName, server_region, done)) + after((done) => client.removeBucket(bucketName, done)) + + if (traceStream) { + after(() => { + client.traceOff() + if (trace_func_test_file_path !== 'process.stdout') { + traceStream.end() + } + }) + } + + describe('makeBucket with period and region', () => { + if (clientConfigParams.endPoint === 's3.amazonaws.com') { + step('makeBucket(bucketName, region, cb)_region:eu-central-1_', (done) => + client.makeBucket(`${bucketName}.sec.period`, 'eu-central-1', done), + ) + step('removeBucket(bucketName, cb)__', (done) => client.removeBucket(`${bucketName}.sec.period`, done)) + } + }) + + describe('listBuckets', () => { + step('listBuckets(cb)__', (done) => { + client.listBuckets((e, buckets) => { + if (e) { + return done(e) + } + if (_.find(buckets, { name: bucketName })) { + return done() + } + done(new Error('bucket not found')) + }) + }) + step('listBuckets()__', (done) => { + client + .listBuckets() + .then((buckets) => { + if (!_.find(buckets, { name: bucketName })) { + return done(new Error('bucket not found')) + } + }) + .then(() => done()) + .catch(done) + }) + }) + + describe('makeBucket with region', () => { + let isDifferentServerRegion = false + step(`makeBucket(bucketName, region, cb)_bucketName:${bucketName}-region, region:us-east-2_`, (done) => { + try { + clientUsEastRegion.makeBucket(`${bucketName}-region`, 'us-east-2', assert.fail) + } catch (e) { + isDifferentServerRegion = true + done() + } + }) + step(`makeBucket(bucketName, region, cb)_bucketName:${bucketName}-region, region:us-east-1_`, (done) 
=> { + if (!isDifferentServerRegion) { + clientUsEastRegion.makeBucket(`${bucketName}-region`, 'us-east-1', done) + } + done() + }) + step(`removeBucket(bucketName, cb)_bucketName:${bucketName}-region_`, (done) => { + if (!isDifferentServerRegion) { + clientUsEastRegion.removeBucket(`${bucketName}-region`, done) + } + done() + }) + step(`makeBucket(bucketName, region)_bucketName:${bucketName}-region, region:us-east-1_`, (done) => { + if (!isDifferentServerRegion) { + clientUsEastRegion.makeBucket(`${bucketName}-region`, 'us-east-1', (e) => { + if (e) { + // Some object storage servers like Azure, might not delete a bucket rightaway + // Add a sleep of 40 seconds and retry + setTimeout(() => { + clientUsEastRegion.makeBucket(`${bucketName}-region`, 'us-east-1', done) + }, 40 * 1000) + } else { + done() + } + }) + } + done() + }) + step(`removeBucket(bucketName)_bucketName:${bucketName}-region_`, (done) => { + if (!isDifferentServerRegion) { + clientUsEastRegion + .removeBucket(`${bucketName}-region`) + .then(() => done()) + .catch(done) + } + done() + }) + }) + + describe('bucketExists', () => { + step(`bucketExists(bucketName, cb)_bucketName:${bucketName}_`, (done) => client.bucketExists(bucketName, done)) + step(`bucketExists(bucketName, cb)_bucketName:${bucketName}random_`, (done) => { + client.bucketExists(bucketName + 'random', (e, exists) => { + if (e === null && !exists) { + return done() + } + done(new Error()) + }) + }) + step(`bucketExists(bucketName)_bucketName:${bucketName}_`, (done) => { + client + .bucketExists(bucketName) + .then(() => done()) + .catch(done) + }) + }) + + describe('removeBucket', () => { + step(`removeBucket(bucketName, cb)_bucketName:${bucketName}random_`, (done) => { + client.removeBucket(bucketName + 'random', (e) => { + if (e.code === 'NoSuchBucket') { + return done() + } + done(new Error()) + }) + }) + step(`makeBucket(bucketName, region)_bucketName:${bucketName}-region-1, region:us-east-1_`, (done) => { + client + 
.makeBucket(`${bucketName}-region-1`, '') + .then(() => client.removeBucket(`${bucketName}-region-1`)) + .then(() => done()) + .catch(done) + }) + }) + describe('tests for putObject getObject removeObject with multipath', function () { + step( + `putObject(bucketName, objectName, stream)_bucketName:${bucketName}, objectName:${_MultiPath100kbObjectBufferName}, stream:100Kib_`, + (done) => { + client + .putObject(bucketName, _MultiPath100kbObjectBufferName, _100kb) + .then(() => done()) + .catch(done) + }, + ) + + step( + `getObject(bucketName, objectName, callback)_bucketName:${bucketName}, objectName:${_MultiPath100kbObjectBufferName}_`, + (done) => { + var hash = crypto.createHash('md5') + client.getObject(bucketName, _MultiPath100kbObjectBufferName, (e, stream) => { + if (e) { + return done(e) + } + stream.on('data', (data) => hash.update(data)) + stream.on('error', done) + stream.on('end', () => { + if (hash.digest('hex') === _100kbmd5) { + return done() + } + done(new Error('content mismatch')) + }) + }) + }, + ) + + step( + `removeObject(bucketName, objectName)_bucketName:${bucketName}, objectName:${_MultiPath100kbObjectBufferName}_`, + (done) => { + client + .removeObject(bucketName, _MultiPath100kbObjectBufferName) + .then(() => done()) + .catch(done) + }, + ) + }) + describe('tests for putObject copyObject getObject getPartialObject statObject removeObject', function () { + var tmpFileUpload = `${tmpDir}/${_100kbObjectName}` + step( + `fPutObject(bucketName, objectName, filePath, metaData, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}, filePath: ${tmpFileUpload}_`, + (done) => { + fs.writeFileSync(tmpFileUpload, _100kb) + client.fPutObject(bucketName, _100kbObjectName, tmpFileUpload, done) + }, + ) + + step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => { + client.statObject(bucketName, _100kbObjectName, (e, stat) => { + if (e) { + return done(e) + } + // As metadata is not 
provided and there is no file extension, + // we default to 'application/octet-stream' as per `probeContentType` function + if (stat.metaData && stat.metaData['content-type'] !== 'application/octet-stream') { + return done(new Error('content-type mismatch')) + } + done() + }) + }) + + var tmpFileUploadWithExt = `${tmpDir}/${_100kbObjectName}.txt` + step( + `fPutObject(bucketName, objectName, filePath, metaData, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}, filePath: ${tmpFileUploadWithExt}, metaData:${metaData}_`, + (done) => { + fs.writeFileSync(tmpFileUploadWithExt, _100kb) + client.fPutObject(bucketName, _100kbObjectName, tmpFileUploadWithExt, metaData, done) + }, + ) + + step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => { + client.statObject(bucketName, _100kbObjectName, (e, stat) => { + if (e) { + return done(e) + } + // As metadata is provided, even though we have an extension, + // the `content-type` should be equal what was declared on the metadata + if (stat.metaData && stat.metaData['content-type'] !== 'text/html') { + return done(new Error('content-type mismatch')) + } else if (!stat.metaData) { + return done(new Error('no metadata present')) + } + done() + }) + }) + + step( + `fPutObject(bucketName, objectName, filePath, metaData, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}, filePath: ${tmpFileUploadWithExt}_`, + (done) => { + fs.writeFileSync(tmpFileUploadWithExt, _100kb) + client.fPutObject(bucketName, _100kbObjectName, tmpFileUploadWithExt, done) + }, + ) + + step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => { + client.statObject(bucketName, _100kbObjectName, (e, stat) => { + if (e) { + return done(e) + } + // As metadata is not provided but we have a file extension, + // we need to infer `content-type` from the file extension + if (stat.metaData && stat.metaData['content-type'] 
!== 'text/plain') { + return done(new Error('content-type mismatch')) + } + done() + }) + }) + + step( + `putObject(bucketName, objectName, stream, size, metaData, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}, stream:100kb, size:${_100kb.length}, metaData:${metaData}_`, + (done) => { + var stream = readableStream(_100kb) + client.putObject(bucketName, _100kbObjectName, stream, _100kb.length, metaData, done) + }, + ) + + step( + `putObject(bucketName, objectName, stream, size, metaData, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}, stream:100kb, size:${_100kb.length}_`, + (done) => { + var stream = readableStream(_100kb) + client.putObject(bucketName, _100kbObjectName, stream, _100kb.length, done) + }, + ) + + step( + `getObject(bucketName, objectName, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, + (done) => { + var hash = crypto.createHash('md5') + client.getObject(bucketName, _100kbObjectName, (e, stream) => { + if (e) { + return done(e) + } + stream.on('data', (data) => hash.update(data)) + stream.on('error', done) + stream.on('end', () => { + if (hash.digest('hex') === _100kbmd5) { + return done() + } + done(new Error('content mismatch')) + }) + }) + }, + ) + + step( + `putObject(bucketName, objectName, stream, callback)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, stream:100kb_`, + (done) => { + client.putObject(bucketName, _100kbObjectBufferName, _100kb, '', done) + }, + ) + + step( + `getObject(bucketName, objectName, callback)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}_`, + (done) => { + var hash = crypto.createHash('md5') + client.getObject(bucketName, _100kbObjectBufferName, (e, stream) => { + if (e) { + return done(e) + } + stream.on('data', (data) => hash.update(data)) + stream.on('error', done) + stream.on('end', () => { + if (hash.digest('hex') === _100kbmd5) { + return done() + } + done(new Error('content mismatch')) + }) + }) + }, + ) + + step( 
+ `putObject(bucketName, objectName, stream, metaData)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, stream:100kb_, metaData:{}`, + (done) => { + client + .putObject(bucketName, _100kbObjectBufferName, _100kb, {}) + .then(() => done()) + .catch(done) + }, + ) + + step( + `getPartialObject(bucketName, objectName, offset, length, cb)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, offset:0, length=1024_`, + (done) => { + client + .getPartialObject(bucketName, _100kbObjectBufferName, 0, 1024) + .then((stream) => { + stream.on('data', function () {}) + stream.on('end', done) + }) + .catch(done) + }, + ) + + step( + `getPartialObject(bucketName, objectName, offset, length, cb)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, offset:1024, length=1024_`, + (done) => { + var expectedHash = crypto.createHash('md5').update(_100kb.slice(1024, 2048)).digest('hex') + var hash = crypto.createHash('md5') + client + .getPartialObject(bucketName, _100kbObjectBufferName, 1024, 1024) + .then((stream) => { + stream.on('data', (data) => hash.update(data)) + stream.on('end', () => { + if (hash.digest('hex') === expectedHash) { + return done() + } + done(new Error('content mismatch')) + }) + }) + .catch(done) + }, + ) + + step( + `getPartialObject(bucketName, objectName, offset, length, cb)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, offset:1024`, + (done) => { + var hash = crypto.createHash('md5') + client + .getPartialObject(bucketName, _100kbObjectBufferName, 1024) + .then((stream) => { + stream.on('data', (data) => hash.update(data)) + stream.on('end', () => { + if (hash.digest('hex') === _100kb1kboffsetmd5) { + return done() + } + done(new Error('content mismatch')) + }) + }) + .catch(done) + }, + ) + + step( + `getObject(bucketName, objectName)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}_`, + (done) => { + client + .getObject(bucketName, _100kbObjectBufferName) + .then((stream) => { + 
stream.on('data', function () {}) + stream.on('end', done) + }) + .catch(done) + }, + ) + + step( + `putObject(bucketName, objectName, stream, metadata, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, + (done) => { + var stream = readableStream(_65mb) + client.putObject(bucketName, _65mbObjectName, stream, metaData, () => { + setTimeout(() => { + if (Object.values(httpAgent.sockets).length === 0) { + return done() + } + done(new Error('http request did not release network socket')) + }, 100) + }) + }, + ) + + step(`getObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, (done) => { + var hash = crypto.createHash('md5') + client.getObject(bucketName, _65mbObjectName, (e, stream) => { + if (e) { + return done(e) + } + stream.on('data', (data) => hash.update(data)) + stream.on('error', done) + stream.on('end', () => { + if (hash.digest('hex') === _65mbmd5) { + return done() + } + done(new Error('content mismatch')) + }) + }) + }) + + step(`getObject(bucketName, objectName, cb)_bucketName:${bucketName} non-existent object`, (done) => { + client.getObject(bucketName, 'an-object-that-does-not-exist', (e, stream) => { + if (stream) { + return done(new Error('on errors the stream object should not exist')) + } + if (!e) { + return done(new Error('expected an error object')) + } + if (e.code !== 'NoSuchKey') { + return done(new Error('expected NoSuchKey error')) + } + done() + }) + }) + + step( + `getPartialObject(bucketName, objectName, offset, length, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}, offset:0, length:100*1024_`, + (done) => { + var hash = crypto.createHash('md5') + var expectedHash = crypto + .createHash('md5') + .update(_65mb.slice(0, 100 * 1024)) + .digest('hex') + client.getPartialObject(bucketName, _65mbObjectName, 0, 100 * 1024, (e, stream) => { + if (e) { + return done(e) + } + stream.on('data', (data) => hash.update(data)) + stream.on('error', done) + stream.on('end', () => { + if 
(hash.digest('hex') === expectedHash) { + return done() + } + done(new Error('content mismatch')) + }) + }) + }, + ) + + step( + `copyObject(bucketName, objectName, srcObject, cb)_bucketName:${bucketName}, objectName:${_65mbObjectNameCopy}, srcObject:/${bucketName}/${_65mbObjectName}_`, + (done) => { + client.copyObject(bucketName, _65mbObjectNameCopy, '/' + bucketName + '/' + _65mbObjectName, (e) => { + if (e) { + return done(e) + } + done() + }) + }, + ) + + step( + `copyObject(bucketName, objectName, srcObject)_bucketName:${bucketName}, objectName:${_65mbObjectNameCopy}, srcObject:/${bucketName}/${_65mbObjectName}_`, + (done) => { + client + .copyObject(bucketName, _65mbObjectNameCopy, '/' + bucketName + '/' + _65mbObjectName) + .then(() => done()) + .catch(done) + }, + ) + + step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, (done) => { + client.statObject(bucketName, _65mbObjectName, (e, stat) => { + if (e) { + return done(e) + } + if (stat.size !== _65mb.length) { + return done(new Error('size mismatch')) + } + if (`${metaData.randomstuff}` !== stat.metaData.randomstuff) { + return done(new Error('metadata "randomstuff" mismatch')) + } + if (`${metaData['X-Amz-Meta-Testing']}` !== stat.metaData['testing']) { + return done(new Error('metadata "testing" mismatch')) + } + if (`${metaData['Content-Type']}` !== stat.metaData['content-type']) { + return done(new Error('metadata "content-type" mismatch')) + } + if (`${metaData['Content-Language']}` !== stat.metaData['content-language']) { + return done(new Error('metadata "content-language" mismatch')) + } + done() + }) + }) + + step(`statObject(bucketName, objectName)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, (done) => { + client + .statObject(bucketName, _65mbObjectName) + .then((stat) => { + if (stat.size !== _65mb.length) { + return done(new Error('size mismatch')) + } + }) + .then(() => done()) + .catch(done) + }) + + 
step(`removeObject(bucketName, objectName)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => { + client + .removeObject(bucketName, _100kbObjectName) + .then(function () { + async.map( + [_100kbObjectBufferName, _65mbObjectName, _65mbObjectNameCopy], + (objectName, cb) => client.removeObject(bucketName, objectName, cb), + done, + ) + }) + .catch(done) + }) + }) + + describe('tests for copyObject statObject', function () { + var etag + var modifiedDate + step( + `putObject(bucketName, objectName, stream, metaData, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}, stream: 100kb, metaData:${metaData}_`, + (done) => { + client.putObject(bucketName, _100kbObjectName, _100kb, metaData, done) + }, + ) + + step( + `copyObject(bucketName, objectName, srcObject, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}, srcObject:/${bucketName}/${_100kbObjectName}_`, + (done) => { + client.copyObject(bucketName, _100kbObjectNameCopy, '/' + bucketName + '/' + _100kbObjectName, (e) => { + if (e) { + return done(e) + } + done() + }) + }, + ) + + step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => { + client.statObject(bucketName, _100kbObjectName, (e, stat) => { + if (e) { + return done(e) + } + if (stat.size !== _100kb.length) { + return done(new Error('size mismatch')) + } + assert.equal(stat.metaData['content-type'], metaData['Content-Type']) + assert.equal(stat.metaData['Testing'], metaData['Testing']) + assert.equal(stat.metaData['randomstuff'], metaData['randomstuff']) + etag = stat.etag + modifiedDate = stat.modifiedDate + done() + }) + }) + + step( + `copyObject(bucketName, objectName, srcObject, conditions, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}, srcObject:/${bucketName}/${_100kbObjectName}, conditions:ExceptIncorrectEtag_`, + (done) => { + var conds = new minio.CopyConditions() + conds.setMatchETagExcept('TestEtag') + 
client.copyObject(bucketName, _100kbObjectNameCopy, '/' + bucketName + '/' + _100kbObjectName, conds, (e) => { + if (e) { + return done(e) + } + done() + }) + }, + ) + + step( + `copyObject(bucketName, objectName, srcObject, conditions, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}, srcObject:/${bucketName}/${_100kbObjectName}, conditions:ExceptCorrectEtag_`, + (done) => { + var conds = new minio.CopyConditions() + conds.setMatchETagExcept(etag) + client + .copyObject(bucketName, _100kbObjectNameCopy, '/' + bucketName + '/' + _100kbObjectName, conds) + .then(() => { + done(new Error('CopyObject should have failed.')) + }) + .catch(() => done()) + }, + ) + + step( + `copyObject(bucketName, objectName, srcObject, conditions, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}, srcObject:/${bucketName}/${_100kbObjectName}, conditions:MatchCorrectEtag_`, + (done) => { + var conds = new minio.CopyConditions() + conds.setMatchETag(etag) + client.copyObject(bucketName, _100kbObjectNameCopy, '/' + bucketName + '/' + _100kbObjectName, conds, (e) => { + if (e) { + return done(e) + } + done() + }) + }, + ) + + step( + `copyObject(bucketName, objectName, srcObject, conditions, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}, srcObject:/${bucketName}/${_100kbObjectName}, conditions:MatchIncorrectEtag_`, + (done) => { + var conds = new minio.CopyConditions() + conds.setMatchETag('TestETag') + client + .copyObject(bucketName, _100kbObjectNameCopy, '/' + bucketName + '/' + _100kbObjectName, conds) + .then(() => { + done(new Error('CopyObject should have failed.')) + }) + .catch(() => done()) + }, + ) + + step( + `copyObject(bucketName, objectName, srcObject, conditions, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}, srcObject:/${bucketName}/${_100kbObjectName}, conditions:Unmodified since ${modifiedDate}`, + (done) => { + var conds = new minio.CopyConditions() + conds.setUnmodified(new Date(modifiedDate)) + 
client.copyObject(bucketName, _100kbObjectNameCopy, '/' + bucketName + '/' + _100kbObjectName, conds, (e) => { + if (e) { + return done(e) + } + done() + }) + }, + ) + + step( + `copyObject(bucketName, objectName, srcObject, conditions, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}, srcObject:/${bucketName}/${_100kbObjectName}, conditions:Unmodified since 2010-03-26T12:00:00Z_`, + (done) => { + var conds = new minio.CopyConditions() + conds.setUnmodified(new Date('2010-03-26T12:00:00Z')) + client + .copyObject(bucketName, _100kbObjectNameCopy, '/' + bucketName + '/' + _100kbObjectName, conds) + .then(() => { + done(new Error('CopyObject should have failed.')) + }) + .catch(() => done()) + }, + ) + + step( + `statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}_`, + (done) => { + client.statObject(bucketName, _100kbObjectNameCopy, (e, stat) => { + if (e) { + return done(e) + } + if (stat.size !== _100kb.length) { + return done(new Error('size mismatch')) + } + done() + }) + }, + ) + + step( + `removeObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}_`, + (done) => { + async.map( + [_100kbObjectName, _100kbObjectNameCopy], + (objectName, cb) => client.removeObject(bucketName, objectName, cb), + done, + ) + }, + ) + }) + + describe('listIncompleteUploads removeIncompleteUpload', () => { + step( + `initiateNewMultipartUpload(bucketName, objectName, metaData, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}, metaData:${metaData}`, + (done) => { + client.initiateNewMultipartUpload(bucketName, _65mbObjectName, metaData, done) + }, + ) + step( + `listIncompleteUploads(bucketName, prefix, recursive)_bucketName:${bucketName}, prefix:${_65mbObjectName}, recursive: true_`, + function (done) { + // MinIO's ListIncompleteUploads returns an empty list, so skip this on non-AWS. 
+ // See: https://github.com/minio/minio/commit/75c43bfb6c4a2ace + let hostSkipList = ['s3.amazonaws.com'] + if (!hostSkipList.includes(client.host)) { + this.skip() + } + + var found = false + client + .listIncompleteUploads(bucketName, _65mbObjectName, true) + .on('error', (e) => done(e)) + .on('data', (data) => { + if (data.key === _65mbObjectName) { + found = true + } + }) + .on('end', () => { + if (found) { + return done() + } + done(new Error(`${_65mbObjectName} not found during listIncompleteUploads`)) + }) + }, + ) + step( + `listIncompleteUploads(bucketName, prefix, recursive)_bucketName:${bucketName}, recursive: true_`, + function (done) { + // MinIO's ListIncompleteUploads returns an empty list, so skip this on non-AWS. + // See: https://github.com/minio/minio/commit/75c43bfb6c4a2ace + let hostSkipList = ['s3.amazonaws.com'] + if (!hostSkipList.includes(client.host)) { + this.skip() + } + + var found = false + client + .listIncompleteUploads(bucketName, '', true) + .on('error', (e) => done(e)) + .on('data', (data) => { + if (data.key === _65mbObjectName) { + found = true + } + }) + .on('end', () => { + if (found) { + return done() + } + done(new Error(`${_65mbObjectName} not found during listIncompleteUploads`)) + }) + }, + ) + step(`removeIncompleteUploads(bucketName, prefix)_bucketName:${bucketName}, prefix:${_65mbObjectName}_`, (done) => { + client.removeIncompleteUpload(bucketName, _65mbObjectName).then(done).catch(done) + }) + }) + + describe('fPutObject fGetObject', function () { + var tmpFileUpload = `${tmpDir}/${_65mbObjectName}` + var tmpFileDownload = `${tmpDir}/${_65mbObjectName}.download` + + step( + `fPutObject(bucketName, objectName, filePath, callback)_bucketName:${bucketName}, objectName:${_65mbObjectName}, filePath:${tmpFileUpload}_`, + (done) => { + fs.writeFileSync(tmpFileUpload, _65mb) + client.fPutObject(bucketName, _65mbObjectName, tmpFileUpload, () => { + setTimeout(() => { + if (Object.values(httpAgent.sockets).length === 0) { + 
return done() + } + done(new Error('http request did not release network socket')) + }, 100) + }) + }, + ) + + step( + `fPutObject(bucketName, objectName, filePath, metaData, callback)_bucketName:${bucketName}, objectName:${_65mbObjectName}, filePath:${tmpFileUpload}, metaData: ${metaData}_`, + (done) => client.fPutObject(bucketName, _65mbObjectName, tmpFileUpload, metaData, done), + ) + step( + `fGetObject(bucketName, objectName, filePath, callback)_bucketName:${bucketName}, objectName:${_65mbObjectName}, filePath:${tmpFileDownload}_`, + (done) => { + client + .fGetObject(bucketName, _65mbObjectName, tmpFileDownload) + .then(() => { + var md5sum = crypto.createHash('md5').update(fs.readFileSync(tmpFileDownload)).digest('hex') + if (md5sum === _65mbmd5) { + return done() + } + return done(new Error('md5sum mismatch')) + }) + .catch(done) + }, + ) + + step( + `removeObject(bucketName, objectName, filePath, callback)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, + (done) => { + fs.unlinkSync(tmpFileDownload) + client + .removeObject(bucketName, _65mbObjectName) + .then(() => done()) + .catch(done) + }, + ) + + step( + `fPutObject(bucketName, objectName, filePath, metaData)_bucketName:${bucketName}, objectName:${_65mbObjectName}, filePath:${tmpFileUpload}_`, + (done) => { + client + .fPutObject(bucketName, _65mbObjectName, tmpFileUpload) + .then(() => done()) + .catch(done) + }, + ) + + step( + `fGetObject(bucketName, objectName, filePath)_bucketName:${bucketName}, objectName:${_65mbObjectName}, filePath:${tmpFileDownload}_`, + (done) => { + client + .fGetObject(bucketName, _65mbObjectName, tmpFileDownload) + .then(() => done()) + .catch(done) + }, + ) + + step( + `removeObject(bucketName, objectName, filePath, callback)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, + (done) => { + fs.unlinkSync(tmpFileUpload) + fs.unlinkSync(tmpFileDownload) + client.removeObject(bucketName, _65mbObjectName, done) + }, + ) + }) + 
describe('fGetObject-resume', () => { + var localFile = `${tmpDir}/${_5mbObjectName}` + var etag = '' + step( + `putObject(bucketName, objectName, stream, metaData, cb)_bucketName:${bucketName}, objectName:${_5mbObjectName}, stream:5mb_`, + (done) => { + var stream = readableStream(_5mb) + client + .putObject(bucketName, _5mbObjectName, stream, _5mb.length, {}) + .then((resp) => { + etag = resp + done() + }) + .catch(done) + }, + ) + step( + `fGetObject(bucketName, objectName, filePath, callback)_bucketName:${bucketName}, objectName:${_5mbObjectName}, filePath:${localFile}`, + (done) => { + var bufPart = Buffer.alloc(_100kb.length) + _5mb.copy(bufPart, 0, 0, _100kb.length) + var tmpFile = `${tmpDir}/${_5mbObjectName}.${etag}.part.minio` + // create a partial file + fs.writeFileSync(tmpFile, bufPart) + client + .fGetObject(bucketName, _5mbObjectName, localFile) + .then(() => { + var md5sum = crypto.createHash('md5').update(fs.readFileSync(localFile)).digest('hex') + if (md5sum === _5mbmd5) { + return done() + } + return done(new Error('md5sum mismatch')) + }) + .catch(done) + }, + ) + step( + `removeObject(bucketName, objectName, callback)_bucketName:${bucketName}, objectName:${_5mbObjectName}_`, + (done) => { + fs.unlinkSync(localFile) + client.removeObject(bucketName, _5mbObjectName, done) + }, + ) + }) + + describe('bucket policy', () => { + let policy = `{"Version":"2012-10-17","Statement":[{"Action":["s3:GetBucketLocation","s3:ListBucket"],"Effect":"Allow","Principal":{"AWS":["*"]},"Resource":["arn:aws:s3:::${bucketName}"],"Sid":""},{"Action":["s3:GetObject"],"Effect":"Allow","Principal":{"AWS":["*"]},"Resource":["arn:aws:s3:::${bucketName}/*"],"Sid":""}]}` + + step(`setBucketPolicy(bucketName, bucketPolicy, cb)_bucketName:${bucketName}, bucketPolicy:${policy}_`, (done) => { + client.setBucketPolicy(bucketName, policy, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + }) + + 
step(`getBucketPolicy(bucketName, cb)_bucketName:${bucketName}_`, (done) => { + client.getBucketPolicy(bucketName, (err, response) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + if (!response) { + return done(new Error(`policy is empty`)) + } + done() + }) + }) + }) + + describe('Test Remove Objects Response in case of Errors', () => { + // Since functional tests are run with root credentials, it is not implemented. + // Test steps + // ============= + // create a bucket + // add some objects + // create a user + // assign the readonly policy to the user + // use the new user credentials to call remove objects API + // verify the response + // assign the readwrite policy to the user + // call remove objects API + // verify the response + // response.Error is an array + // -[]- empty array indicates success for all objects + // Note: the response code is 200. so the consumer should inspect the response + // Sample Response format: + /** + * { + * Code: 'AccessDenied', + * Message: 'Access Denied.', + * Key: '1.png', + * VersionId: '' + * } + * + * or + * + * { + * Code: 'NoSuchVersion', + * Message: 'The specified version does not exist. 
(invalid UUID length: 9)', + * Key: '1.png', + * VersionId: 'test-v-is' + * } + */ + /* + let readOnlyPolicy ='{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Action":["s3:GetBucketLocation","s3:GetObject"],"Resource":["arn:aws:s3:::*"]}]}' + let readWritePolicy ='{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Action":["s3:*"],"Resource":["arn:aws:s3:::*"]}]}' + */ + }) + + describe('presigned operations', () => { + step( + `presignedPutObject(bucketName, objectName, expires, cb)_bucketName:${bucketName}, objectName:${_1byteObjectName}, expires: 1000_`, + (done) => { + client.presignedPutObject(bucketName, _1byteObjectName, 1000, (e, presignedUrl) => { + if (e) { + return done(e) + } + var transport = http + var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) + options.method = 'PUT' + options.headers = { + 'content-length': _1byte.length, + } + if (options.protocol === 'https:') { + transport = https + } + var request = transport.request(options, (response) => { + if (response.statusCode !== 200) { + return done(new Error(`error on put : ${response.statusCode}`)) + } + response.on('error', (e) => done(e)) + response.on('end', () => done()) + response.on('data', () => {}) + }) + request.on('error', (e) => done(e)) + request.write(_1byte) + request.end() + }) + }, + ) + + step( + `presignedPutObject(bucketName, objectName, expires)_bucketName:${bucketName}, objectName:${_1byteObjectName}, expires:-123_`, + (done) => { + // negative values should trigger an error + client + .presignedPutObject(bucketName, _1byteObjectName, -123) + .then(() => { + done(new Error('negative values should trigger an error')) + }) + .catch(() => done()) + }, + ) + + step( + `presignedPutObject(bucketName, objectName)_bucketName:${bucketName}, objectName:${_1byteObjectName}_`, + (done) => { + // Putting the same object should not cause any error + client + .presignedPutObject(bucketName, _1byteObjectName) + .then(() => done()) + 
.catch(done) + }, + ) + + step( + `presignedGetObject(bucketName, objectName, expires, cb)_bucketName:${bucketName}, objectName:${_1byteObjectName}, expires:1000_`, + (done) => { + client.presignedGetObject(bucketName, _1byteObjectName, 1000, (e, presignedUrl) => { + if (e) { + return done(e) + } + var transport = http + var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) + options.method = 'GET' + if (options.protocol === 'https:') { + transport = https + } + var request = transport.request(options, (response) => { + if (response.statusCode !== 200) { + return done(new Error(`error on put : ${response.statusCode}`)) + } + var error = null + response.on('error', (e) => done(e)) + response.on('end', () => done(error)) + response.on('data', (data) => { + if (data.toString() !== _1byte.toString()) { + error = new Error('content mismatch') + } + }) + }) + request.on('error', (e) => done(e)) + request.end() + }) + }, + ) + + step( + `presignedUrl(httpMethod, bucketName, objectName, expires, cb)_httpMethod:GET, bucketName:${bucketName}, objectName:${_1byteObjectName}, expires:1000_`, + (done) => { + client.presignedUrl('GET', bucketName, _1byteObjectName, 1000, (e, presignedUrl) => { + if (e) { + return done(e) + } + var transport = http + var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) + options.method = 'GET' + if (options.protocol === 'https:') { + transport = https + } + var request = transport.request(options, (response) => { + if (response.statusCode !== 200) { + return done(new Error(`error on put : ${response.statusCode}`)) + } + var error = null + response.on('error', (e) => done(e)) + response.on('end', () => done(error)) + response.on('data', (data) => { + if (data.toString() !== _1byte.toString()) { + error = new Error('content mismatch') + } + }) + }) + request.on('error', (e) => done(e)) + request.end() + }) + }, + ) + + step( + `presignedUrl(httpMethod, bucketName, objectName, 
expires, cb)_httpMethod:GET, bucketName:${bucketName}, objectName:${_1byteObjectName}, expires:86400, requestDate:StartOfDay_`, + (done) => { + var requestDate = new Date() + requestDate.setHours(0, 0, 0, 0) + client.presignedUrl('GET', bucketName, _1byteObjectName, 86400, requestDate, (e, presignedUrl) => { + if (e) { + return done(e) + } + var transport = http + var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) + options.method = 'GET' + if (options.protocol === 'https:') { + transport = https + } + var request = transport.request(options, (response) => { + if (response.statusCode !== 200) { + return done(new Error(`error on put : ${response.statusCode}`)) + } + var error = null + response.on('error', (e) => done(e)) + response.on('end', () => done(error)) + response.on('data', (data) => { + if (data.toString() !== _1byte.toString()) { + error = new Error('content mismatch') + } + }) + }) + request.on('error', (e) => done(e)) + request.end() + }) + }, + ) + + step( + `presignedGetObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_1byteObjectName}_`, + (done) => { + client.presignedGetObject(bucketName, _1byteObjectName, (e, presignedUrl) => { + if (e) { + return done(e) + } + var transport = http + var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) + options.method = 'GET' + if (options.protocol === 'https:') { + transport = https + } + var request = transport.request(options, (response) => { + if (response.statusCode !== 200) { + return done(new Error(`error on put : ${response.statusCode}`)) + } + var error = null + response.on('error', (e) => done(e)) + response.on('end', () => done(error)) + response.on('data', (data) => { + if (data.toString() !== _1byte.toString()) { + error = new Error('content mismatch') + } + }) + }) + request.on('error', (e) => done(e)) + request.end() + }) + }, + ) + + step( + `presignedGetObject(bucketName, objectName, 
expires)_bucketName:${bucketName}, objectName:this.does.not.exist, expires:2938_`, + (done) => { + client + .presignedGetObject(bucketName, 'this.does.not.exist', 2938) + .then(assert.fail) + .catch(() => done()) + }, + ) + + step( + `presignedGetObject(bucketName, objectName, expires, respHeaders, cb)_bucketName:${bucketName}, objectName:${_1byteObjectName}, expires:1000_`, + (done) => { + var respHeaders = { + 'response-content-type': 'text/html', + 'response-content-language': 'en', + 'response-expires': 'Sun, 07 Jun 2020 16:07:58 GMT', + 'response-cache-control': 'No-cache', + 'response-content-disposition': 'attachment; filename=testing.txt', + 'response-content-encoding': 'gzip', + } + client.presignedGetObject(bucketName, _1byteObjectName, 1000, respHeaders, (e, presignedUrl) => { + if (e) { + return done(e) + } + var transport = http + var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) + options.method = 'GET' + if (options.protocol === 'https:') { + transport = https + } + var request = transport.request(options, (response) => { + if (response.statusCode !== 200) { + return done(new Error(`error on get : ${response.statusCode}`)) + } + if (respHeaders['response-content-type'] !== response.headers['content-type']) { + return done(new Error(`content-type header mismatch`)) + } + if (respHeaders['response-content-language'] !== response.headers['content-language']) { + return done(new Error(`content-language header mismatch`)) + } + if (respHeaders['response-expires'] !== response.headers['expires']) { + return done(new Error(`expires header mismatch`)) + } + if (respHeaders['response-cache-control'] !== response.headers['cache-control']) { + return done(new Error(`cache-control header mismatch`)) + } + if (respHeaders['response-content-disposition'] !== response.headers['content-disposition']) { + return done(new Error(`content-disposition header mismatch`)) + } + if (respHeaders['response-content-encoding'] !== 
response.headers['content-encoding']) { + return done(new Error(`content-encoding header mismatch`)) + } + response.on('data', () => {}) + done() + }) + request.on('error', (e) => done(e)) + request.end() + }) + }, + ) + + step( + `presignedGetObject(bucketName, objectName, respHeaders, cb)_bucketName:${bucketName}, objectName:${_1byteObjectName}, contentDisposition special chars`, + (done) => { + var respHeaders = { + 'response-content-disposition': + 'attachment; filename="abc|"@#$%&/(<>)/=?!{[\']}+*-_:,;def.png"; filename*=UTF-8\'\'t&21st&20ng.png', + } + client.presignedGetObject(bucketName, _1byteObjectName, 1000, respHeaders, (e, presignedUrl) => { + if (e) { + return done(e) + } + var transport = http + var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) + options.method = 'GET' + if (options.protocol === 'https:') { + transport = https + } + var request = transport.request(options, (response) => { + if (response.statusCode !== 200) { + return done(new Error(`error on get : ${response.statusCode}`)) + } + if (respHeaders['response-content-disposition'] !== response.headers['content-disposition']) { + return done(new Error(`content-disposition header mismatch`)) + } + response.on('data', () => {}) + done() + }) + request.on('error', (e) => done(e)) + request.end() + }) + }, + ) + + step( + `presignedGetObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_1byteObjectName}, expires:86400, requestDate:StartOfDay_`, + (done) => { + var requestDate = new Date() + requestDate.setHours(0, 0, 0, 0) + client.presignedGetObject(bucketName, _1byteObjectName, 86400, {}, requestDate, (e, presignedUrl) => { + if (e) { + return done(e) + } + var transport = http + var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) + options.method = 'GET' + if (options.protocol === 'https:') { + transport = https + } + var request = transport.request(options, (response) => { + if 
(response.statusCode !== 200) { + return done(new Error(`error on put : ${response.statusCode}`)) + } + var error = null + response.on('error', (e) => done(e)) + response.on('end', () => done(error)) + response.on('data', (data) => { + if (data.toString() !== _1byte.toString()) { + error = new Error('content mismatch') + } + }) + }) + request.on('error', (e) => done(e)) + request.end() + }) + }, + ) + + step('presignedPostPolicy(postPolicy, cb)_postPolicy:expiresin10days_', (done) => { + var policy = client.newPostPolicy() + policy.setKey(_1byteObjectName) + policy.setBucket(bucketName) + var expires = new Date() + expires.setSeconds(24 * 60 * 60 * 10) + policy.setExpires(expires) + + client.presignedPostPolicy(policy, (e, data) => { + if (e) { + return done(e) + } + var req = superagent.post(data.postURL) + _.each(data.formData, (value, key) => req.field(key, value)) + req.attach('file', Buffer.from([_1byte]), 'test') + req.end(function (e) { + if (e) { + return done(e) + } + done() + }) + req.on('error', (e) => done(e)) + }) + }) + + step('presignedPostPolicy(postPolicy, cb)_postPolicy:setContentType', (done) => { + var policy = client.newPostPolicy() + policy.setKey(_1byteObjectName) + policy.setBucket(bucketName) + policy.setContentType('text/plain') + + client.presignedPostPolicy(policy, (e, data) => { + if (e) { + return done(e) + } + var req = superagent.post(data.postURL) + _.each(data.formData, (value, key) => req.field(key, value)) + req.attach('file', Buffer.from([_1byte]), 'test') + req.end(function (e) { + if (e) { + return done(e) + } + done() + }) + req.on('error', (e) => done(e)) + }) + }) + + step('presignedPostPolicy(postPolicy, cb)_postPolicy:setContentTypeStartsWith', (done) => { + var policy = client.newPostPolicy() + policy.setKey(_1byteObjectName) + policy.setBucket(bucketName) + policy.setContentTypeStartsWith('text/') + + client.presignedPostPolicy(policy, (e, data) => { + if (e) { + return done(e) + } + var req = 
superagent.post(data.postURL) + _.each(data.formData, (value, key) => req.field(key, value)) + req.attach('file', Buffer.from([_1byte]), 'test') + req.end(function (e) { + if (e) { + return done(e) + } + done() + }) + req.on('error', (e) => done(e)) + }) + }) + + step('presignedPostPolicy(postPolicy, cb)_postPolicy:setContentDisposition_inline', (done) => { + var policy = client.newPostPolicy() + var objectName = 'test-content-disposition' + uuid.v4() + policy.setKey(objectName) + policy.setBucket(bucketName) + policy.setContentDisposition('inline') + + client.presignedPostPolicy(policy, (e, data) => { + if (e) { + return done(e) + } + var req = superagent.post(data.postURL) + _.each(data.formData, (value, key) => req.field(key, value)) + req.attach('file', Buffer.from([_1byte]), 'test') + req.end(function (e) { + if (e) { + return done(e) + } + client.removeObject(bucketName, objectName, done) + }) + req.on('error', (e) => done(e)) + }) + }) + + step('presignedPostPolicy(postPolicy, cb)_postPolicy:setContentDisposition_attachment', (done) => { + var policy = client.newPostPolicy() + var objectName = 'test-content-disposition' + uuid.v4() + policy.setKey(objectName) + policy.setBucket(bucketName) + policy.setContentDisposition('attachment; filename= My* Docume! 
nt.json') + + client.presignedPostPolicy(policy, (e, data) => { + if (e) { + return done(e) + } + var req = superagent.post(data.postURL) + _.each(data.formData, (value, key) => req.field(key, value)) + req.attach('file', Buffer.from([_1byte]), 'test') + req.end(function (e) { + if (e) { + return done(e) + } + client.removeObject(bucketName, objectName, done) + }) + req.on('error', (e) => done(e)) + }) + }) + + step('presignedPostPolicy(postPolicy, cb)_postPolicy:setUserMetaData_', (done) => { + var policy = client.newPostPolicy() + var objectName = 'test-metadata' + uuid.v4() + policy.setKey(objectName) + policy.setBucket(bucketName) + policy.setUserMetaData({ + key: 'my-value', + anotherKey: 'another-value', + }) + + client.presignedPostPolicy(policy, (e, data) => { + if (e) { + return done(e) + } + var req = superagent.post(data.postURL) + _.each(data.formData, (value, key) => req.field(key, value)) + req.attach('file', Buffer.from([_1byte]), 'test') + req.end(function (e) { + if (e) { + return done(e) + } + client.removeObject(bucketName, objectName, done) + }) + req.on('error', (e) => done(e)) + }) + }) + + step('presignedPostPolicy(postPolicy)_postPolicy: null_', (done) => { + client + .presignedPostPolicy(null) + .then(() => { + done(new Error('null policy should fail')) + }) + .catch(() => done()) + }) + + step( + `presignedUrl(httpMethod, bucketName, objectName, expires, reqParams, cb)_httpMethod:GET, bucketName:${bucketName}, expires:1000_`, + (done) => { + client.presignedUrl('GET', bucketName, '', 1000, { prefix: 'data', 'max-keys': 1000 }, (e, presignedUrl) => { + if (e) { + return done(e) + } + var transport = http + var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) + options.method = 'GET' + options.headers = {} + var str = '' + if (options.protocol === 'https:') { + transport = https + } + var callback = function (response) { + if (response.statusCode !== 200) { + return done(new Error(`error on put : 
${response.statusCode}`)) + } + response.on('error', (e) => done(e)) + response.on('end', function () { + if (!str.match(`${_1byteObjectName}`)) { + return done(new Error('Listed object does not match the object in the bucket!')) + } + done() + }) + response.on('data', function (chunk) { + str += chunk + }) + } + var request = transport.request(options, callback) + request.end() + }) + }, + ) + + step( + `presignedUrl(httpMethod, bucketName, objectName, expires, cb)_httpMethod:DELETE, bucketName:${bucketName}, objectName:${_1byteObjectName}, expires:1000_`, + (done) => { + client.presignedUrl('DELETE', bucketName, _1byteObjectName, 1000, (e, presignedUrl) => { + if (e) { + return done(e) + } + var transport = http + var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) + options.method = 'DELETE' + options.headers = {} + if (options.protocol === 'https:') { + transport = https + } + var request = transport.request(options, (response) => { + if (response.statusCode !== 204) { + return done(new Error(`error on put : ${response.statusCode}`)) + } + response.on('error', (e) => done(e)) + response.on('end', () => done()) + response.on('data', () => {}) + }) + request.on('error', (e) => done(e)) + request.end() + }) + }, + ) + }) + + describe('listObjects', function () { + var listObjectPrefix = 'miniojsPrefix' + var listObjectsNum = 10 + var objArray = [] + var listArray = [] + var listPrefixArray = [] + + step( + `putObject(bucketName, objectName, stream, size, metaData, callback)_bucketName:${bucketName}, stream:1b, size:1_Create ${listObjectsNum} objects`, + (done) => { + _.times(listObjectsNum, (i) => objArray.push(`${listObjectPrefix}.${i}`)) + objArray = objArray.sort() + async.mapLimit( + objArray, + 20, + (objectName, cb) => client.putObject(bucketName, objectName, readableStream(_1byte), _1byte.length, {}, cb), + done, + ) + }, + ) + + step( + `listObjects(bucketName, prefix, recursive)_bucketName:${bucketName}, prefix: 
miniojsprefix, recursive:true_`, + (done) => { + client + .listObjects(bucketName, listObjectPrefix, true) + .on('error', done) + .on('end', () => { + if (_.isEqual(objArray, listPrefixArray)) { + return done() + } + return done(new Error(`listObjects lists ${listPrefixArray.length} objects, expected ${listObjectsNum}`)) + }) + .on('data', (data) => { + listPrefixArray.push(data.name) + }) + }, + ) + + step('listObjects(bucketName, prefix, recursive)_recursive:true_', (done) => { + try { + client.listObjects('', '', true).on('end', () => { + return done(new Error(`listObjects should throw exception when empty bucketname is passed`)) + }) + } catch (e) { + if (e.name === 'InvalidBucketNameError') { + done() + } else { + done(e) + } + } + }) + + step(`listObjects(bucketName, prefix, recursive)_bucketName:${bucketName}, recursive:false_`, (done) => { + listArray = [] + client + .listObjects(bucketName, '', false) + .on('error', done) + .on('end', () => { + if (_.isEqual(objArray, listArray)) { + return done() + } + return done(new Error(`listObjects lists ${listArray.length} objects, expected ${listObjectsNum}`)) + }) + .on('data', (data) => { + listArray.push(data.name) + }) + }) + + step( + `listObjectsV2(bucketName, prefix, recursive, startAfter)_bucketName:${bucketName}, recursive:true_`, + (done) => { + listArray = [] + client + .listObjectsV2(bucketName, '', true, '') + .on('error', done) + .on('end', () => { + if (_.isEqual(objArray, listArray)) { + return done() + } + return done(new Error(`listObjects lists ${listArray.length} objects, expected ${listObjectsNum}`)) + }) + .on('data', (data) => { + listArray.push(data.name) + }) + }, + ) + + step( + `listObjectsV2WithMetadata(bucketName, prefix, recursive, startAfter)_bucketName:${bucketName}, recursive:true_`, + (done) => { + listArray = [] + client.extensions + .listObjectsV2WithMetadata(bucketName, '', true, '') + .on('error', done) + .on('end', () => { + if (_.isEqual(objArray, listArray)) { + return 
done() + } + return done(new Error(`listObjects lists ${listArray.length} objects, expected ${listObjectsNum}`)) + }) + .on('data', (data) => { + listArray.push(data.name) + }) + }, + ) + + step( + `removeObject(bucketName, objectName, callback)_bucketName:${bucketName}_Remove ${listObjectsNum} objects`, + (done) => { + async.mapLimit(listArray, 20, (objectName, cb) => client.removeObject(bucketName, objectName, cb), done) + }, + ) + }) + + describe('removeObjects', function () { + var listObjectPrefix = 'miniojsPrefix' + var listObjectsNum = 10 + var objArray = [] + var objectsList = [] + + step( + `putObject(bucketName, objectName, stream, size, contentType, callback)_bucketName:${bucketName}, stream:1b, size:1_Create ${listObjectsNum} objects`, + (done) => { + _.times(listObjectsNum, (i) => objArray.push(`${listObjectPrefix}.${i}`)) + objArray = objArray.sort() + async.mapLimit( + objArray, + 20, + (objectName, cb) => client.putObject(bucketName, objectName, readableStream(_1byte), _1byte.length, '', cb), + done, + ) + }, + ) + + step(`listObjects(bucketName, prefix, recursive)_bucketName:${bucketName}, recursive:false_`, (done) => { + client + .listObjects(bucketName, listObjectPrefix, false) + .on('error', done) + .on('end', () => { + try { + client.removeObjects(bucketName, '', function (e) { + if (e) { + done() + } + }) + } catch (e) { + if (e.name === 'InvalidArgumentError') { + done() + } + } + }) + .on('data', (data) => { + objectsList.push(data.name) + }) + }) + + objectsList = [] + + step(`listObjects(bucketName, prefix, recursive)_bucketName:${bucketName}, recursive:false_`, (done) => { + client + .listObjects(bucketName, listObjectPrefix, false) + .on('error', done) + .on('end', () => { + client.removeObjects(bucketName, objectsList, function (e) { + if (e) { + done(e) + } + done() + }) + }) + .on('data', (data) => { + objectsList.push(data.name) + }) + }) + + // Non latin characters + step(`putObject(bucketName, objectName, 
stream)_bucketName:${bucketName}, objectName:fileΩ, stream:1b`, (done) => { + client + .putObject(bucketName, 'fileΩ', _1byte) + .then(() => done()) + .catch(done) + }) + + step(`removeObjects with non latin characters`, (done) => { + client + .removeObjects(bucketName, ['fileΩ']) + .then(() => done()) + .catch(done) + }) + }) + + describe('bucket notifications', () => { + describe('#listenBucketNotification', () => { + before(function () { + // listenBucketNotification only works on MinIO, so skip if + // the host is Amazon. + let hostSkipList = ['s3.amazonaws.com'] + if (hostSkipList.includes(client.host)) { + this.skip() + } + }) + + step( + `listenBucketNotification(bucketName, prefix, suffix, events)_bucketName:${bucketName}, prefix:photos/, suffix:.jpg, events:bad_`, + (done) => { + let poller = client.listenBucketNotification(bucketName, 'photos/', '.jpg', ['bad']) + poller.on('error', (error) => { + if (error.code !== 'NotImplemented') { + assert.match(error.message, /A specified event is not supported for notifications./) + assert.equal(error.code, 'InvalidArgument') + } + done() + }) + }, + ) + step( + `listenBucketNotification(bucketName, prefix, suffix, events)_bucketName:${bucketName}, events: s3:ObjectCreated:*_`, + (done) => { + let poller = client.listenBucketNotification(bucketName, '', '', ['s3:ObjectCreated:*']) + let records = 0 + let pollerError = null + poller.on('notification', (record) => { + records++ + + assert.equal(record.eventName, 's3:ObjectCreated:Put') + assert.equal(record.s3.bucket.name, bucketName) + assert.equal(record.s3.object.key, objectName) + }) + poller.on('error', (error) => { + pollerError = error + }) + setTimeout(() => { + // Give it some time for the notification to be setup. 
+ if (pollerError) { + if (pollerError.code !== 'NotImplemented') { + done(pollerError) + } else { + done() + } + return + } + client.putObject(bucketName, objectName, 'stringdata', (err) => { + if (err) { + return done(err) + } + setTimeout(() => { + // Give it some time to get the notification. + poller.stop() + client.removeObject(bucketName, objectName, (err) => { + if (err) { + return done(err) + } + if (!records) { + return done(new Error('notification not received')) + } + done() + }) + }, 10 * 1000) + }) + }, 10 * 1000) + }, + ) + + // This test is very similar to that above, except it does not include + // Minio.ObjectCreatedAll in the config. Thus, no events should be emitted. + step( + `listenBucketNotification(bucketName, prefix, suffix, events)_bucketName:${bucketName}, events:s3:ObjectRemoved:*`, + (done) => { + let poller = client.listenBucketNotification(bucketName, '', '', ['s3:ObjectRemoved:*']) + poller.on('notification', assert.fail) + poller.on('error', (error) => { + if (error.code !== 'NotImplemented') { + done(error) + } + }) + + client.putObject(bucketName, objectName, 'stringdata', (err) => { + if (err) { + return done(err) + } + // It polls every five seconds, so wait for two-ish polls, then end. + setTimeout(() => { + poller.stop() + poller.removeAllListeners('notification') + // clean up object now + client.removeObject(bucketName, objectName, done) + }, 11 * 1000) + }) + }, + ) + }) + }) + + describe('Bucket Versioning API', () => { + // Isolate the bucket/object for easy debugging and tracking. 
+ const versionedBucketName = 'minio-js-test-version-' + uuid.v4() + before((done) => client.makeBucket(versionedBucketName, '', done)) + after((done) => client.removeBucket(versionedBucketName, done)) + + describe('Versioning Steps test', function () { + step('Check if versioning is enabled on a bucket', (done) => { + client.getBucketVersioning(versionedBucketName, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + }) + step('Enable versioning on a bucket', (done) => { + client.setBucketVersioning(versionedBucketName, { Status: 'Enabled' }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + }) + + step('Suspend versioning on a bucket', (done) => { + client.setBucketVersioning(versionedBucketName, { Status: 'Suspended' }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + }) + + step('Check if versioning is Suspended on a bucket', (done) => { + client.getBucketVersioning(versionedBucketName, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + }) + }) + }) + + describe('Versioning tests on a buckets', function () { + // Isolate the bucket/object for easy debugging and tracking. + const versionedBucketName = 'minio-js-test-version-' + uuid.v4() + const versioned_100kbObjectName = 'datafile-100-kB' + const versioned_100kb_Object = dataDir + ? 
fs.readFileSync(dataDir + '/' + versioned_100kbObjectName) + : Buffer.alloc(100 * 1024, 0) + + before((done) => client.makeBucket(versionedBucketName, '', done)) + after((done) => client.removeBucket(versionedBucketName, done)) + + describe('Versioning Steps test', function () { + let versionId + + step( + `setBucketVersioning(bucketName, versionConfig):_bucketName:${versionedBucketName},versionConfig:{Status:"Enabled"} `, + (done) => { + client.setBucketVersioning(versionedBucketName, { Status: 'Enabled' }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + }, + ) + + step( + `putObject(bucketName, objectName, stream)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}, stream:100Kib_`, + (done) => { + client + .putObject(versionedBucketName, versioned_100kbObjectName, versioned_100kb_Object) + .then(() => done()) + .catch(done) + }, + ) + + step( + `statObject(bucketName, objectName, statOpts)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}`, + (done) => { + client.statObject(versionedBucketName, versioned_100kbObjectName, {}, (e, res) => { + versionId = res.versionId + done() + }) + }, + ) + + step( + `removeObject(bucketName, objectName, removeOpts)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}`, + (done) => { + client.removeObject(versionedBucketName, versioned_100kbObjectName, { versionId: versionId }, () => { + done() + }) + }, + ) + + step( + `setBucketVersioning(bucketName, versionConfig):_bucketName:${versionedBucketName},versionConfig:{Status:"Suspended"}`, + (done) => { + client.setBucketVersioning(versionedBucketName, { Status: 'Suspended' }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + }, + ) + }) + }) + + describe('Versioning tests on a buckets: getObject, fGetObject, getPartialObject, putObject, removeObject with 
versionId support', function () { + // Isolate the bucket/object for easy debugging and tracking. + const versionedBucketName = 'minio-js-test-version-' + uuid.v4() + const versioned_100kbObjectName = 'datafile-100-kB' + const versioned_100kb_Object = dataDir + ? fs.readFileSync(dataDir + '/' + versioned_100kbObjectName) + : Buffer.alloc(100 * 1024, 0) + + before((done) => client.makeBucket(versionedBucketName, '', done)) + after((done) => client.removeBucket(versionedBucketName, done)) + + describe('Versioning Test for getObject, getPartialObject, putObject, removeObject with versionId support', function () { + let versionId = null + step( + `Enable Versioning on Bucket: setBucketVersioning(bucketName,versioningConfig)_bucketName:${versionedBucketName},{Status:"Enabled"}`, + (done) => { + client.setBucketVersioning(versionedBucketName, { Status: 'Enabled' }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + }, + ) + + step( + `putObject(bucketName, objectName, stream)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}, stream:100Kib_`, + (done) => { + client + .putObject(versionedBucketName, versioned_100kbObjectName, versioned_100kb_Object) + .then((res = {}) => { + if (res.versionId) { + versionId = res.versionId // In gateway mode versionId will not be returned. 
+ } + done() + }) + .catch(done) + }, + ) + + step( + `getObject(bucketName, objectName, getOpts)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}`, + (done) => { + if (versionId) { + client.getObject( + versionedBucketName, + versioned_100kbObjectName, + { versionId: versionId }, + function (e, dataStream) { + const objVersion = getVersionId(dataStream.headers) + if (objVersion) { + done() + } else { + done(new Error('versionId not found in getObject response')) + } + }, + ) + } else { + done() + } + }, + ) + + step( + `fGetObject(bucketName, objectName, filePath, getOpts={})_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}`, + (done) => { + if (versionId) { + var tmpFileDownload = `${tmpDir}/${versioned_100kbObjectName}.download` + client.fGetObject( + versionedBucketName, + versioned_100kbObjectName, + tmpFileDownload, + { versionId: versionId }, + function () { + done() + }, + ) + } else { + done() + } + }, + ) + + step( + `getPartialObject(bucketName, objectName, offset, length, getOpts)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}`, + (done) => { + if (versionId) { + client.getPartialObject( + versionedBucketName, + versioned_100kbObjectName, + 10, + 30, + { versionId: versionId }, + function (e, dataStream) { + const objVersion = getVersionId(dataStream.headers) + if (objVersion) { + done() + } else { + done(new Error('versionId not found in getPartialObject response')) + } + }, + ) + } else { + done() + } + }, + ) + + step( + `removeObject(bucketName, objectName, removeOpts)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName},removeOpts:{versionId:${versionId}`, + (done) => { + if (versionId) { + client.removeObject(versionedBucketName, versioned_100kbObjectName, { versionId: versionId }, () => { + done() + }) + } else { + // In gateway mode, use regular delete to remove an object so that the bucket can be cleaned up. 
+ client.removeObject(versionedBucketName, versioned_100kbObjectName, () => { + done() + }) + } + }, + ) + + step( + `setBucketVersioning(bucketName, versionConfig):_bucketName:${versionedBucketName},versionConfig:{Status:"Suspended"}`, + (done) => { + client.setBucketVersioning(versionedBucketName, { Status: 'Suspended' }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + }, + ) + }) + }) + + describe('Versioning Supported listObjects', function () { + const versionedBucketName = 'minio-js-test-version-list' + uuid.v4() + const prefixName = 'Prefix1' + const versionedObjectName = 'datafile-100-kB' + const objVersionIdCounter = [1, 2, 3, 4, 5] // This should track adding 5 versions of the same object. + let listObjectsNum = objVersionIdCounter.length + let objArray = [] + let listPrefixArray = [] + let isVersioningSupported = false + + const objNameWithPrefix = `${prefixName}/${versionedObjectName}` + + before((done) => + client.makeBucket(versionedBucketName, '', () => { + client.setBucketVersioning(versionedBucketName, { Status: 'Enabled' }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + isVersioningSupported = true + done() + }) + }), + ) + after((done) => client.removeBucket(versionedBucketName, done)) + + step( + `putObject(bucketName, objectName, stream, size, metaData, callback)_bucketName:${versionedBucketName}, stream:1b, size:1_Create ${listObjectsNum} objects`, + (done) => { + if (isVersioningSupported) { + let count = 1 + objVersionIdCounter.forEach(() => { + client.putObject( + versionedBucketName, + objNameWithPrefix, + readableStream(_1byte), + _1byte.length, + {}, + (e, data) => { + objArray.push(data) + if (count === objVersionIdCounter.length) { + done() + } + count += 1 + }, + ) + }) + } else { + done() + } + }, + ) + + step( + `listObjects(bucketName, prefix, 
recursive)_bucketName:${versionedBucketName}, prefix: '', recursive:true_`, + (done) => { + if (isVersioningSupported) { + client + .listObjects(versionedBucketName, '', true, { IncludeVersion: true }) + .on('error', done) + .on('end', () => { + if (_.isEqual(objArray.length, listPrefixArray.length)) { + return done() + } + return done(new Error(`listObjects lists ${listPrefixArray.length} objects, expected ${listObjectsNum}`)) + }) + .on('data', (data) => { + listPrefixArray.push(data) + }) + } else { + done() + } + }, + ) + + step( + `listObjects(bucketName, prefix, recursive)_bucketName:${versionedBucketName}, prefix: ${prefixName}, recursive:true_`, + (done) => { + if (isVersioningSupported) { + listPrefixArray = [] + client + .listObjects(versionedBucketName, prefixName, true, { IncludeVersion: true }) + .on('error', done) + .on('end', () => { + if (_.isEqual(objArray.length, listPrefixArray.length)) { + return done() + } + return done(new Error(`listObjects lists ${listPrefixArray.length} objects, expected ${listObjectsNum}`)) + }) + .on('data', (data) => { + listPrefixArray.push(data) + }) + } else { + done() + } + }, + ) + + step( + `removeObject(bucketName, objectName, removeOpts)_bucketName:${versionedBucketName}_Remove ${listObjectsNum} objects`, + (done) => { + if (isVersioningSupported) { + let count = 1 + listPrefixArray.forEach((item) => { + client.removeObject(versionedBucketName, item.name, { versionId: item.versionId }, () => { + if (count === listPrefixArray.length) { + done() + } + count += 1 + }) + }) + } else { + done() + } + }, + ) + }) + + describe('Versioning tests on a bucket for Deletion of Multiple versions', function () { + // Isolate the bucket/object for easy debugging and tracking. + const versionedBucketName = 'minio-js-test-version-' + uuid.v4() + const versioned_100kbObjectName = 'datafile-100-kB' + const versioned_100kb_Object = dataDir + ? 
fs.readFileSync(dataDir + '/' + versioned_100kbObjectName) + : Buffer.alloc(100 * 1024, 0) + + before((done) => client.makeBucket(versionedBucketName, '', done)) + after((done) => client.removeBucket(versionedBucketName, done)) + + describe('Test for removal of multiple versions', function () { + let isVersioningSupported = false + const objVersionList = [] + step( + `setBucketVersioning(bucketName, versionConfig):_bucketName:${versionedBucketName},versionConfig:{Status:"Enabled"} `, + (done) => { + client.setBucketVersioning(versionedBucketName, { Status: 'Enabled' }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + isVersioningSupported = true + done() + }) + }, + ) + + step( + `putObject(bucketName, objectName, stream)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}, stream:100Kib_`, + (done) => { + if (isVersioningSupported) { + client + .putObject(versionedBucketName, versioned_100kbObjectName, versioned_100kb_Object) + .then(() => done()) + .catch(done) + } else { + done() + } + }, + ) + // Put two versions of the same object. + step( + `putObject(bucketName, objectName, stream)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}, stream:100Kib_`, + (done) => { + // Put two versions of the same object. 
+ if (isVersioningSupported) { + client + .putObject(versionedBucketName, versioned_100kbObjectName, versioned_100kb_Object) + .then(() => done()) + .catch(done) + } else { + done() + } + }, + ) + + step( + `listObjects(bucketName, prefix, recursive)_bucketName:${versionedBucketName}, prefix: '', recursive:true_`, + (done) => { + if (isVersioningSupported) { + client + .listObjects(versionedBucketName, '', true, { IncludeVersion: true }) + .on('error', done) + .on('end', () => { + if (_.isEqual(2, objVersionList.length)) { + return done() + } + return done(new Error(`listObjects lists ${objVersionList.length} objects, expected ${2}`)) + }) + .on('data', (data) => { + // Pass list object response as is to remove objects + objVersionList.push(data) + }) + } else { + done() + } + }, + ) + + step( + `removeObjects(bucketName, objectList, removeOpts)_bucketName:${versionedBucketName}_Remove ${objVersionList.length} objects`, + (done) => { + if (isVersioningSupported) { + let count = 1 + objVersionList.forEach(() => { + // remove multiple versions of the object. + client.removeObjects(versionedBucketName, objVersionList, () => { + if (count === objVersionList.length) { + done() + } + count += 1 + }) + }) + } else { + done() + } + }, + ) + }) + }) + + describe('Bucket Tags API', () => { + // Isolate the bucket/object for easy debugging and tracking. 
+ const tagsBucketName = 'minio-js-test-tags-' + uuid.v4() + before((done) => client.makeBucket(tagsBucketName, '', done)) + after((done) => client.removeBucket(tagsBucketName, done)) + + describe('set, get and remove Tags on a bucket', function () { + step(`Set tags on a bucket_bucketName:${tagsBucketName}`, (done) => { + client.setBucketTagging(tagsBucketName, { 'test-tag-key': 'test-tag-value' }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + }) + step(`Get tags on a bucket_bucketName:${tagsBucketName}`, (done) => { + client.getBucketTagging(tagsBucketName, (err, tagList) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + if (isArray(tagList)) { + done() + } + }) + }) + + step(`remove Tags on a bucket_bucketName:${tagsBucketName}`, (done) => { + client.removeBucketTagging(tagsBucketName, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + }) + }) + }) + + describe('Object Tags API', () => { + // Isolate the bucket/object for easy debugging and tracking. 
+ const tagsBucketName = 'minio-js-test-tags-' + uuid.v4() + before((done) => client.makeBucket(tagsBucketName, '', done)) + after((done) => client.removeBucket(tagsBucketName, done)) + + const tagObjName = 'datafile-100-kB' + const tagObject = Buffer.alloc(100 * 1024, 0) + + describe('set, get and remove Tags on an object', function () { + step( + `putObject(bucketName, objectName, stream)_bucketName:${tagsBucketName}, objectName:${tagObjName}, stream:100Kib_`, + (done) => { + client + .putObject(tagsBucketName, tagObjName, tagObject) + .then(() => done()) + .catch(done) + }, + ) + + step(`putObjectTagging object_bucketName:${tagsBucketName}, objectName:${tagObjName},`, (done) => { + client.setObjectTagging(tagsBucketName, tagObjName, { 'test-tag-key-obj': 'test-tag-value-obj' }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + }) + + step(`getObjectTagging object_bucketName:${tagsBucketName}, objectName:${tagObjName},`, (done) => { + client.getObjectTagging(tagsBucketName, tagObjName, (err, tagList) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + if (isArray(tagList)) { + done() + } + }) + }) + + step(`removeObjectTagging on an object_bucketName:${tagsBucketName}, objectName:${tagObjName},`, (done) => { + client.removeObjectTagging(tagsBucketName, tagObjName, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + }) + step(`removeObject object_bucketName:${tagsBucketName}, objectName:${tagObjName},`, (done) => { + client.removeObject(tagsBucketName, tagObjName, () => { + done() + }) + }) + }) + }) + + describe('Object Tags API with Versioning support', () => { + // Isolate the bucket/object for easy debugging and tracking. 
+ const tagsVersionedBucketName = 'minio-js-test-tags-version-' + uuid.v4() + before((done) => client.makeBucket(tagsVersionedBucketName, '', done)) + after((done) => client.removeBucket(tagsVersionedBucketName, done)) + + const tagObjName = 'datafile-100-kB' + const tagObject = Buffer.alloc(100 * 1024, 0) + let isVersioningSupported = false + let versionId = null + + describe('set, get and remove Tags on a versioned object', function () { + step( + `Enable Versioning on Bucket: setBucketVersioning(bucketName,versioningConfig)_bucketName:${tagsVersionedBucketName},{Status:"Enabled"}`, + (done) => { + client.setBucketVersioning(tagsVersionedBucketName, { Status: 'Enabled' }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + isVersioningSupported = true + done() + }) + }, + ) + + step( + `putObject(bucketName, objectName, stream)_bucketName:${tagsVersionedBucketName}, objectName:${tagObjName}, stream:100Kib_`, + (done) => { + if (isVersioningSupported) { + client + .putObject(tagsVersionedBucketName, tagObjName, tagObject) + .then((res = {}) => { + if (res.versionId) { + versionId = res.versionId // In gateway mode versionId will not be returned. 
+ } + done() + }) + .catch(done) + } else { + done() + } + }, + ) + + step(`Set tags on an object_bucketName:${tagsVersionedBucketName}, objectName:${tagObjName},`, (done) => { + if (isVersioningSupported) { + client.setObjectTagging( + tagsVersionedBucketName, + tagObjName, + { 'test-tag-key-obj': 'test-tag-value-obj' }, + { versionId: versionId }, + (err) => { + if (err) { + return done(err) + } + done() + }, + ) + } else { + done() + } + }) + + step(`Get tags on an object_bucketName:${tagsVersionedBucketName}, objectName:${tagObjName},`, (done) => { + if (isVersioningSupported) { + client.getObjectTagging(tagsVersionedBucketName, tagObjName, { versionId: versionId }, (err, tagList) => { + if (err) { + return done(err) + } + if (isArray(tagList)) { + done() + } + }) + } else { + done() + } + }) + + step(`remove Tags on an object_bucketName:${tagsVersionedBucketName}, objectName:${tagObjName},`, (done) => { + if (isVersioningSupported) { + client.removeObjectTagging(tagsVersionedBucketName, tagObjName, { versionId: versionId }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + } else { + done() + } + }) + step(`remove Tags on an object_bucketName:${tagsVersionedBucketName}, objectName:${tagObjName},`, (done) => { + if (isVersioningSupported) { + client.removeObject(tagsVersionedBucketName, tagObjName, { versionId: versionId }, () => { + done() + }) + } else { + done() + } + }) + }) + }) + + describe('Bucket Lifecycle API', () => { + const bucketName = 'minio-js-test-lifecycle-' + uuid.v4() + before((done) => client.makeBucket(bucketName, '', done)) + after((done) => client.removeBucket(bucketName, done)) + + describe('Set, Get Lifecycle config Tests', function () { + step(`Set lifecycle config on a bucket:_bucketName:${bucketName}`, (done) => { + const lifecycleConfig = { + Rule: [ + { + ID: 'Transition and Expiration Rule', + Status: 'Enabled', + Filter: { + Prefix: '', + }, + 
Expiration: { + Days: '3650', + }, + }, + ], + } + client.setBucketLifecycle(bucketName, lifecycleConfig, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + }) + + step('Set lifecycle config of a bucket', (done) => { + client.getBucketLifecycle(bucketName, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + }) + + step('Remove lifecycle config of a bucket', (done) => { + client.removeBucketLifecycle(bucketName, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + }) + }) + }) + + describe('Versioning Supported preSignedUrl Get, Put Tests', function () { + /** + * Test Steps + * 1. Create Versioned Bucket + * 2. presignedPutObject of 2 Versions of different size + * 3. List and ensure that there are two versions + * 4. presignedGetObject with versionId to ensure that we are able to get + * 5. Remove all object versions at once + * 6. Cleanup bucket. 
+ */ + + const versionedBucketName = 'minio-js-test-ver-presign-' + uuid.v4() + const versionedPresignObjName = 'datafile-1-b' + const _100_byte = Buffer.alloc(100 * 1024, 0) + const _200_byte = Buffer.alloc(200 * 1024, 0) + let isVersioningSupported = false + const objectsList = [] + const expectedVersionsCount = 2 + + before((done) => + client.makeBucket(versionedBucketName, '', () => { + client.setBucketVersioning(versionedBucketName, { Status: 'Enabled' }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + isVersioningSupported = true + done() + }) + }), + ) + after((done) => client.removeBucket(versionedBucketName, done)) + + step( + `presignedPutObject(bucketName, objectName, expires=1000, cb)_bucketName:${versionedBucketName} ${versionedPresignObjName} _version:1`, + (done) => { + if (isVersioningSupported) { + client.presignedPutObject(versionedBucketName, versionedPresignObjName, 1000, (e, presignedUrl) => { + if (e) { + done(e) + } + let mobileClientReqWithProtocol = http + var upldRequestOptions = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) + upldRequestOptions.method = 'PUT' + upldRequestOptions.headers = { + 'content-length': _100_byte.length, + } + if (upldRequestOptions.protocol === 'https:') { + mobileClientReqWithProtocol = https + } + const uploadRequest = mobileClientReqWithProtocol.request(upldRequestOptions, (response) => { + if (response.statusCode !== 200) { + return new Error(`error on put : ${response.statusCode}`) + } + response.on('error', (err) => { + done(err) + }) + response.on('end', () => { + done() + }) + response.on('data', () => { + // just drain + }) + }) + + uploadRequest.on('error', (er) => { + done(er) + }) + + uploadRequest.write(_100_byte) + uploadRequest.end() + }) + } else { + done() + } + }, + ) + + step( + `presignedPutObject(bucketName, objectName, expires=1000, cb)_bucketName:${versionedBucketName} ${versionedPresignObjName} 
_version:2`, + (done) => { + if (isVersioningSupported) { + client.presignedPutObject(versionedBucketName, versionedPresignObjName, 1000, (e, presignedUrl) => { + if (e) { + done(e) + } + let mobileClientReqWithProtocol = http + var upldRequestOptions = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) + upldRequestOptions.method = 'PUT' + upldRequestOptions.headers = { + 'content-length': _200_byte.length, + } + if (upldRequestOptions.protocol === 'https:') { + mobileClientReqWithProtocol = https + } + const uploadRequest = mobileClientReqWithProtocol.request(upldRequestOptions, (response) => { + if (response.statusCode !== 200) { + return new Error(`error on put : ${response.statusCode}`) + } + response.on('error', (err) => { + done(err) + }) + response.on('end', () => { + done() + }) + response.on('data', () => { + // just drain + }) + }) + + uploadRequest.on('error', (er) => { + done(er) + }) + + uploadRequest.write(_200_byte) + uploadRequest.end() + }) + } else { + done() + } + }, + ) + + step( + `listObjects(bucketName, '', true, {IncludeVersion: true}, cb)_bucketName:${versionedBucketName} _prefix:""`, + (done) => { + if (isVersioningSupported) { + const objectsStream = client.listObjects(versionedBucketName, '', true, { IncludeVersion: true }) + objectsStream.on('data', function (obj) { + objectsList.push({ versionId: obj.versionId, name: obj.name }) + }) + + objectsStream.on('error', function () { + return done() + }) + objectsStream.on('end', function () { + const objectListCount = objectsList.length + if (objectListCount === expectedVersionsCount) { + done() + } else { + return done( + new Error(`Version count does not match for versioned presigned url test. 
${expectedVersionsCount}`), + ) + } + }) + } else { + done() + } + }, + ) + + step( + `presignedGetObject(bucketName, objectName, 1000, respHeaders, requestDate, cb)_bucketName:${versionedBucketName} _objectName:${versionedPresignObjName} _version:(2/2)`, + (done) => { + if (isVersioningSupported) { + client.presignedGetObject( + versionedBucketName, + objectsList[1].name, + 1000, + { versionId: objectsList[1].versionId }, + new Date(), + (e, presignedUrl) => { + if (e) { + return done() + } + let mobileClientReqWithProtocol = http + const getReqOpts = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) + getReqOpts.method = 'GET' + const _100kbmd5 = crypto.createHash('md5').update(_100_byte).digest('hex') + + const hash = crypto.createHash('md5') + if (getReqOpts.protocol === 'https:') { + mobileClientReqWithProtocol = https + } + const request = mobileClientReqWithProtocol.request(getReqOpts, (response) => { + // if delete marker. method not allowed. + if (response.statusCode !== 200) { + return new Error(`error on get : ${response.statusCode}`) + } + response.on('error', () => { + return done() + }) + response.on('end', () => { + const hashValue = hash.digest('hex') + if (hashValue === _100kbmd5) { + done() + } else { + return done(new Error('Unable to retrieve version of an object using presignedGetObject')) + } + }) + response.on('data', (data) => { + hash.update(data) + }) + }) + request.on('error', () => { + return done() + }) + request.end() + }, + ) + } else { + done() + } + }, + ) + + step(`removeObjects(bucketName, objectsList)_bucketName:${versionedBucketName}`, (done) => { + if (isVersioningSupported) { + client.removeObjects(versionedBucketName, objectsList, function (e) { + if (e) { + done(e) + } + done() + }) + } else { + done() + } + }) + }) + + describe('Object Lock API Bucket Options Test', () => { + // Isolate the bucket/object for easy debugging and tracking. + // Gateway mode does not support this header. 
+ + describe('Object Lock support makeBucket API Tests', function () { + const lockEnabledBucketName = 'minio-js-test-lock-mb-' + uuid.v4() + let isFeatureSupported = false + step(`Check if bucket with object lock can be created:_bucketName:${lockEnabledBucketName}`, (done) => { + client.makeBucket(lockEnabledBucketName, { ObjectLocking: true }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + isFeatureSupported = true + if (err) { + return done(err) + } + done() + }) + }) + + step(`Get lock config on a bucket:_bucketName:${lockEnabledBucketName}`, (done) => { + if (isFeatureSupported) { + client.getObjectLockConfig(lockEnabledBucketName, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + } else { + done() + } + }) + + step(`Check if bucket can be deleted:_bucketName:${lockEnabledBucketName}`, (done) => { + client.removeBucket(lockEnabledBucketName, (err) => { + if (isFeatureSupported) { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + } else { + done() + } + }) + }) + }) + + describe('Object Lock support Set/Get API Tests', function () { + const lockConfigBucketName = 'minio-js-test-lock-conf-' + uuid.v4() + let isFeatureSupported = false + step(`Check if bucket with object lock can be created:_bucketName:${lockConfigBucketName}`, (done) => { + client.makeBucket(lockConfigBucketName, { ObjectLocking: true }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + isFeatureSupported = true + if (err) { + return done(err) + } + done() + }) + }) + step(`Update or replace lock config on a bucket:_bucketName:${lockConfigBucketName}`, (done) => { + if (isFeatureSupported) { + client.setObjectLockConfig( + lockConfigBucketName, + { mode: 'GOVERNANCE', unit: 'Years', validity: 2 }, + (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return 
done(err) + } + done() + }, + ) + } else { + done() + } + }) + step(`Get lock config on a bucket:_bucketName:${lockConfigBucketName}`, (done) => { + if (isFeatureSupported) { + client.getObjectLockConfig(lockConfigBucketName, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + } else { + done() + } + }) + + step(`Set lock config on a bucket:_bucketName:${lockConfigBucketName}`, (done) => { + if (isFeatureSupported) { + client.setObjectLockConfig(lockConfigBucketName, {}, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + } else { + done() + } + }) + step(`Get and verify lock config on a bucket after reset/update:_bucketName:${lockConfigBucketName}`, (done) => { + if (isFeatureSupported) { + client.getObjectLockConfig(lockConfigBucketName, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + }) + } else { + done() + } + }) + + step(`Check if bucket can be deleted:_bucketName:${lockConfigBucketName}`, (done) => { + client.removeBucket(lockConfigBucketName, (err) => { + if (isFeatureSupported) { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + done() + } else { + done() + } + }) + }) + }) + }) + + describe('Object retention API Tests', () => { + // Isolate the bucket/object for easy debugging and tracking. + // Gateway mode does not support this header. 
+ + describe('Object retention get/set API Test', function () { + const objRetentionBucket = 'minio-js-test-retention-' + uuid.v4() + const retentionObjName = 'RetentionObject' + let isFeatureSupported = false + let versionId = null + + step(`Check if bucket with object lock can be created:_bucketName:${objRetentionBucket}`, (done) => { + client.makeBucket(objRetentionBucket, { ObjectLocking: true }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + isFeatureSupported = true + if (err) { + return done(err) + } + done() + }) + }) + + step( + `putObject(bucketName, objectName, stream)_bucketName:${objRetentionBucket}, objectName:${retentionObjName}, stream:100Kib_`, + (done) => { + // Put two versions of the same object. + if (isFeatureSupported) { + client + .putObject(objRetentionBucket, retentionObjName, readableStream(_1byte), _1byte.length, {}) + .then(() => done()) + .catch(done) + } else { + done() + } + }, + ) + + step( + `statObject(bucketName, objectName, statOpts)_bucketName:${objRetentionBucket}, objectName:${retentionObjName}`, + (done) => { + if (isFeatureSupported) { + client.statObject(objRetentionBucket, retentionObjName, {}, (e, res) => { + versionId = res.versionId + done() + }) + } else { + done() + } + }, + ) + + step( + `putObjectRetention(bucketName, objectName, putOpts)_bucketName:${objRetentionBucket}, objectName:${retentionObjName}`, + (done) => { + // Put two versions of the same object. + if (isFeatureSupported) { + let expirationDate = new Date() + // set expiry to start of next day. 
+ expirationDate.setDate(expirationDate.getDate() + 1) + expirationDate.setUTCHours(0, 0, 0, 0) // Should be start of the day.(midnight) + + client + .putObjectRetention(objRetentionBucket, retentionObjName, { + governanceBypass: true, + mode: 'GOVERNANCE', + retainUntilDate: expirationDate.toISOString(), + versionId: versionId, + }) + .then(() => done()) + .catch(done) + } else { + done() + } + }, + ) + + step( + `getObjectRetention(bucketName, objectName, getOpts)_bucketName:${objRetentionBucket}, objectName:${retentionObjName}`, + (done) => { + if (isFeatureSupported) { + client.getObjectRetention(objRetentionBucket, retentionObjName, { versionId: versionId }, () => { + done() + }) + } else { + done() + } + }, + ) + + step( + `removeObject(bucketName, objectName, removeOpts)_bucketName:${objRetentionBucket}, objectName:${retentionObjName}`, + (done) => { + if (isFeatureSupported) { + client.removeObject( + objRetentionBucket, + retentionObjName, + { versionId: versionId, governanceBypass: true }, + () => { + done() + }, + ) + } else { + done() + } + }, + ) + + step(`removeBucket(bucketName, )_bucketName:${objRetentionBucket}`, (done) => { + if (isFeatureSupported) { + client.removeBucket(objRetentionBucket, () => { + done() + }) + } else { + done() + } + }) + }) + }) + + describe('Bucket Encryption Related APIs', () => { + // Isolate the bucket/object for easy debugging and tracking. + // this is not supported in gateway mode. + const encBucketName = 'minio-js-test-bucket-enc-' + uuid.v4() + before((done) => client.makeBucket(encBucketName, '', done)) + after((done) => client.removeBucket(encBucketName, done)) + + const encObjName = 'datafile-100-kB' + const encObjFileContent = Buffer.alloc(100 * 1024, 0) + let isEncryptionSupported = false + + step(`Set Encryption on a bucket:_bucketName:${encBucketName}`, (done) => { + // setBucketEncryption succeeds in NAS mode. 
+ const buckEncPromise = client.setBucketEncryption(encBucketName) + buckEncPromise + .then(() => { + done() + }) + .catch(() => { + done() + }) + }) + + step(`Get encryption of a bucket:_bucketName:${encBucketName}`, (done) => { + const getBucEncObj = client.getBucketEncryption(encBucketName) + getBucEncObj + .then(() => { + done() + }) + .catch((err) => { + if (err && err.code === 'NotImplemented') { + isEncryptionSupported = false + return done() + } + if (err && err.code === 'ServerSideEncryptionConfigurationNotFoundError') { + return done() + } + if (err) { + return done(err) + } + done() + }) + }) + + step( + `Put an object to check for default encryption bucket:_bucketName:${encBucketName}, _objectName:${encObjName}`, + (done) => { + if (isEncryptionSupported) { + const putObjPromise = client.putObject(encBucketName, encObjName, encObjFileContent) + putObjPromise + .then(() => { + done() + }) + .catch(() => { + done() + }) + } else { + done() + } + }, + ) + + step( + `Stat of an object to check for default encryption applied on a bucket:_bucketName:${encBucketName}, _objectName:${encObjName}`, + (done) => { + if (isEncryptionSupported) { + const statObjPromise = client.statObject(encBucketName, encObjName) + statObjPromise + .then(() => { + done() + }) + .catch(() => { + done() + }) + } else { + done() + } + }, + ) + + step( + `Stat of an object to check for default encryption applied on a bucket:_bucketName:${encBucketName}`, + (done) => { + if (isEncryptionSupported) { + const getBuckEnc = client.getBucketEncryption(encBucketName) + getBuckEnc + .then(() => { + done() + }) + .catch(() => { + done() + }) + } else { + done() + } + }, + ) + + step(`Remove object on a bucket:_bucketName:${encBucketName}, _objectName:${encObjName}`, (done) => { + if (isEncryptionSupported) { + const removeObj = client.removeObject(encBucketName, encObjName) + removeObj + .then(() => { + done() + }) + .catch(() => { + done() + }) + } else { + done() + } + }) + + step(`Remove 
encryption on a bucket:_bucketName:${encBucketName}`, (done) => { + if (isEncryptionSupported) { + const removeObj = client.removeBucketEncryption(encBucketName) + removeObj + .then(() => { + done() + }) + .catch(() => { + done() + }) + } else { + done() + } + }) + step(`Get encryption on a bucket:_bucketName:${encBucketName}`, (done) => { + if (isEncryptionSupported) { + const getBuckEnc = client.getBucketEncryption(encBucketName) + getBuckEnc + .then(() => { + done() + }) + .catch(() => { + done() + }) + } else { + done() + } + }) + }) + + describe('Bucket Replication API Tests', () => { + // TODO - As of now, there is no api to get arn programmatically to setup replication through APIs and verify. + // Please refer to minio server documentation and mc cli. + // https://min.io/docs/minio/linux/administration/bucket-replication.html + // https://min.io/docs/minio/linux/reference/minio-mc/mc-replicate-add.html + }) + + describe('Object Legal hold API Tests', () => { + // Isolate the bucket/object for easy debugging and tracking. + // Gateway mode does not support this header. 
+ let versionId = null + describe('Object Legal hold get/set API Test', function () { + const objLegalHoldBucketName = 'minio-js-test-legalhold-' + uuid.v4() + const objLegalHoldObjName = 'LegalHoldObject' + let isFeatureSupported = false + + step(`Check if bucket with object lock can be created:_bucketName:${objLegalHoldBucketName}`, (done) => { + client.makeBucket(objLegalHoldBucketName, { ObjectLocking: true }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + isFeatureSupported = true + if (err) { + return done(err) + } + done() + }) + }) + + step( + `putObject(bucketName, objectName, stream)_bucketName:${objLegalHoldBucketName}, objectName:${objLegalHoldObjName}, stream:100Kib_`, + (done) => { + if (isFeatureSupported) { + client + .putObject(objLegalHoldBucketName, objLegalHoldObjName, readableStream(_1byte), _1byte.length, {}) + .then(() => done()) + .catch(done) + } else { + done() + } + }, + ) + + step( + `statObject(bucketName, objectName, statOpts)_bucketName:${objLegalHoldBucketName}, objectName:${objLegalHoldObjName}`, + (done) => { + if (isFeatureSupported) { + client.statObject(objLegalHoldBucketName, objLegalHoldObjName, {}, (e, res) => { + versionId = res.versionId + done() + }) + } else { + done() + } + }, + ) + + step( + `setObjectLegalHold(bucketName, objectName, setOpts={})_bucketName:${objLegalHoldBucketName}, objectName:${objLegalHoldObjName}`, + (done) => { + if (isFeatureSupported) { + client.setObjectLegalHold(objLegalHoldBucketName, objLegalHoldObjName, () => { + done() + }) + } else { + done() + } + }, + ) + + step( + `setObjectLegalHold(bucketName, objectName, setOpts={})_bucketName:${objLegalHoldBucketName}, objectName:${objLegalHoldObjName}`, + (done) => { + if (isFeatureSupported) { + client.setObjectLegalHold( + objLegalHoldBucketName, + objLegalHoldObjName, + { status: 'ON', versionId: versionId }, + () => { + done() + }, + ) + } else { + done() + } + }, + ) + + step( + `getObjectLegalHold(bucketName, 
objectName, setOpts={})_bucketName:${objLegalHoldBucketName}, objectName:${objLegalHoldObjName}`, + (done) => { + if (isFeatureSupported) { + client.getObjectLegalHold(objLegalHoldBucketName, objLegalHoldObjName, () => { + done() + }) + } else { + done() + } + }, + ) + + step( + `setObjectLegalHold(bucketName, objectName, setOpts={})_bucketName:${objLegalHoldBucketName}, objectName:${objLegalHoldObjName}`, + (done) => { + if (isFeatureSupported) { + client.setObjectLegalHold( + objLegalHoldBucketName, + objLegalHoldObjName, + { status: 'OFF', versionId: versionId }, + () => { + done() + }, + ) + } else { + done() + } + }, + ) + + step( + `getObjectLegalHold(bucketName, objectName, setOpts={})_bucketName:${objLegalHoldBucketName}, objectName:${objLegalHoldObjName}`, + (done) => { + if (isFeatureSupported) { + client.getObjectLegalHold(objLegalHoldBucketName, objLegalHoldObjName, { versionId: versionId }, () => { + done() + }) + } else { + done() + } + }, + ) + + step( + `removeObject(bucketName, objectName, removeOpts)_bucketName:${objLegalHoldBucketName}, objectName:${objLegalHoldObjName}`, + (done) => { + if (isFeatureSupported) { + client.removeObject( + objLegalHoldBucketName, + objLegalHoldObjName, + { versionId: versionId, governanceBypass: true }, + () => { + done() + }, + ) + } else { + done() + } + }, + ) + + step(`removeBucket(bucketName, )_bucketName:${objLegalHoldBucketName}`, (done) => { + if (isFeatureSupported) { + client.removeBucket(objLegalHoldBucketName, () => { + done() + }) + } else { + done() + } + }) + }) + }) + + describe('Object Name special characters test without Prefix', () => { + // Isolate the bucket/object for easy debugging and tracking. 
+ const bucketNameForSpCharObjects = 'minio-js-test-obj-spwpre-' + uuid.v4() + before((done) => client.makeBucket(bucketNameForSpCharObjects, '', done)) + after((done) => client.removeBucket(bucketNameForSpCharObjects, done)) + + // Reference:: https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html + // Host OS compatible File name characters/ file names. + + let objectNameSpecialChars = "äöüex ®©µÄÆÐÕæŒƕƩDž 01000000 0x40 \u0040 amȡȹɆple&0a!-_.*'()&$@=;:+,?<>.pdf" + if (isWindowsPlatform) { + objectNameSpecialChars = "äöüex ®©µÄÆÐÕæŒƕƩDž 01000000 0x40 u0040 amȡȹɆple&0a!-_.'()&$@=;+,.pdf" + } + + const objectContents = Buffer.alloc(100 * 1024, 0) + + describe('Without Prefix Test', function () { + step( + `putObject(bucketName, objectName, stream)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameSpecialChars}, stream:100Kib_`, + (done) => { + client + .putObject(bucketNameForSpCharObjects, objectNameSpecialChars, objectContents) + .then(() => { + done() + }) + .catch(done) + }, + ) + + step( + `listObjects(bucketName, prefix, recursive)_bucketName:${bucketNameForSpCharObjects}, prefix:"", true`, + (done) => { + const listStream = client.listObjects(bucketNameForSpCharObjects, '', true) + let listedObject = null + listStream.on('data', function (obj) { + listedObject = obj + }) + listStream.on('end', () => { + if (listedObject.name === objectNameSpecialChars) { + done() + } else { + return done(new Error(`Expected object Name: ${objectNameSpecialChars}: received:${listedObject.name}`)) + } + }) + listStream.on('error', function (e) { + done(e) + }) + }, + ) + + step( + `listObjectsV2(bucketName, prefix, recursive)_bucketName:${bucketNameForSpCharObjects}, prefix:"", true`, + (done) => { + const listStream = client.listObjectsV2(bucketNameForSpCharObjects, '', true) + let listedObject = null + listStream.on('data', function (obj) { + listedObject = obj + }) + listStream.on('end', () => { + if (listedObject.name === 
objectNameSpecialChars) { + done() + } else { + return done(new Error(`Expected object Name: ${objectNameSpecialChars}: received:${listedObject.name}`)) + } + }) + + listStream.on('error', function (e) { + done(e) + }) + }, + ) + step( + `extensions.listObjectsV2WithMetadata(bucketName, prefix, recursive)_bucketName:${bucketNameForSpCharObjects}, prefix:"", true`, + (done) => { + const listStream = client.extensions.listObjectsV2WithMetadata(bucketNameForSpCharObjects, '', true) + let listedObject = null + listStream.on('data', function (obj) { + listedObject = obj + }) + listStream.on('end', () => { + if (listedObject.name === objectNameSpecialChars) { + done() + } else { + return done(new Error(`Expected object Name: ${objectNameSpecialChars}: received:${listedObject.name}`)) + } + }) + + listStream.on('error', function (e) { + done(e) + }) + }, + ) + + step( + `getObject(bucketName, objectName)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameSpecialChars}`, + (done) => { + client + .getObject(bucketNameForSpCharObjects, objectNameSpecialChars) + .then((stream) => { + stream.on('data', function () {}) + stream.on('end', done) + }) + .catch(done) + }, + ) + + step( + `statObject(bucketName, objectName, cb)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameSpecialChars}`, + (done) => { + client.statObject(bucketNameForSpCharObjects, objectNameSpecialChars, (e) => { + if (e) { + return done(e) + } + done() + }) + }, + ) + + step( + `removeObject(bucketName, objectName)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameSpecialChars}`, + (done) => { + client + .removeObject(bucketNameForSpCharObjects, objectNameSpecialChars) + .then(() => done()) + .catch(done) + }, + ) + }) + }) + describe('Object Name special characters test with a Prefix', () => { + // Isolate the bucket/object for easy debugging and tracking. 
+ const bucketNameForSpCharObjects = 'minio-js-test-obj-spnpre-' + uuid.v4() + before((done) => client.makeBucket(bucketNameForSpCharObjects, '', done)) + after((done) => client.removeBucket(bucketNameForSpCharObjects, done)) + + // Reference:: https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html + let objectNameSpecialChars = "äöüex ®©µÄÆÐÕæŒƕƩDž 01000000 0x40 \u0040 amȡȹɆple&0a!-_.*'()&$@=;:+,?<>.pdf" + if (isWindowsPlatform) { + objectNameSpecialChars = "äöüex ®©µÄÆÐÕæŒƕƩDž 01000000 0x40 u0040 amȡȹɆple&0a!-_.'()&$@=;+,.pdf" + } + const prefix = 'test' + const objectNameWithPrefixForSpecialChars = `${prefix}/${objectNameSpecialChars}` + + const objectContents = Buffer.alloc(100 * 1024, 0) + + describe('With Prefix Test', function () { + step( + `putObject(bucketName, objectName, stream)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameWithPrefixForSpecialChars}, stream:100Kib`, + (done) => { + client + .putObject(bucketNameForSpCharObjects, objectNameWithPrefixForSpecialChars, objectContents) + .then(() => { + done() + }) + .catch(done) + }, + ) + + step( + `listObjects(bucketName, prefix, recursive)_bucketName:${bucketNameForSpCharObjects}, prefix:${prefix}, recursive:true`, + (done) => { + const listStream = client.listObjects(bucketNameForSpCharObjects, prefix, true) + let listedObject = null + listStream.on('data', function (obj) { + listedObject = obj + }) + listStream.on('end', () => { + if (listedObject.name === objectNameWithPrefixForSpecialChars) { + done() + } else { + return done( + new Error( + `Expected object Name: ${objectNameWithPrefixForSpecialChars}: received:${listedObject.name}`, + ), + ) + } + }) + listStream.on('error', function (e) { + done(e) + }) + }, + ) + + step( + `listObjectsV2(bucketName, prefix, recursive)_bucketName:${bucketNameForSpCharObjects}, prefix:${prefix}, recursive:true`, + (done) => { + const listStream = client.listObjectsV2(bucketNameForSpCharObjects, prefix, true) + let listedObject = 
null + listStream.on('data', function (obj) { + listedObject = obj + }) + listStream.on('end', () => { + if (listedObject.name === objectNameWithPrefixForSpecialChars) { + done() + } else { + return done( + new Error( + `Expected object Name: ${objectNameWithPrefixForSpecialChars}: received:${listedObject.name}`, + ), + ) + } + }) + listStream.on('error', function (e) { + done(e) + }) + }, + ) + + step( + `extensions.listObjectsV2WithMetadata(bucketName, prefix, recursive)_bucketName:${bucketNameForSpCharObjects}, prefix:${prefix}, recursive:true`, + (done) => { + const listStream = client.extensions.listObjectsV2WithMetadata(bucketNameForSpCharObjects, prefix, true) + let listedObject = null + listStream.on('data', function (obj) { + listedObject = obj + }) + listStream.on('end', () => { + if (listedObject.name === objectNameWithPrefixForSpecialChars) { + done() + } else { + return done( + new Error( + `Expected object Name: ${objectNameWithPrefixForSpecialChars}: received:${listedObject.name}`, + ), + ) + } + }) + listStream.on('error', function (e) { + done(e) + }) + }, + ) + + step( + `getObject(bucketName, objectName)_bucketName:${bucketNameForSpCharObjects}, _objectName_:${objectNameWithPrefixForSpecialChars}`, + (done) => { + client + .getObject(bucketNameForSpCharObjects, objectNameWithPrefixForSpecialChars) + .then((stream) => { + stream.on('data', function () {}) + stream.on('end', done) + }) + .catch(done) + }, + ) + + step( + `statObject(bucketName, objectName, cb)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameWithPrefixForSpecialChars}`, + (done) => { + client.statObject(bucketNameForSpCharObjects, objectNameWithPrefixForSpecialChars, (e) => { + if (e) { + return done(e) + } + done() + }) + }, + ) + + step( + `removeObject(bucketName, objectName)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameWithPrefixForSpecialChars}`, + (done) => { + client + .removeObject(bucketNameForSpCharObjects, 
objectNameWithPrefixForSpecialChars) + .then(() => done()) + .catch(done) + }, + ) + }) + }) + + describe('Assume Role Tests', () => { + // Run only in local environment. + const bucketName = 'minio-js-test-assume-role' + uuid.v4() + before((done) => client.makeBucket(bucketName, '', done)) + after((done) => client.removeBucket(bucketName, done)) + + const objName = 'datafile-100-kB' + const objContent = Buffer.alloc(100 * 1024, 0) + + const canRunAssumeRoleTest = clientConfigParams.endPoint.includes('localhost') + const stsEndPoint = 'http://localhost:9000' + + try { + if (canRunAssumeRoleTest) { + // Creates a new Client with assume role provider for testing. + const assumeRoleProvider = new AssumeRoleProvider({ + stsEndpoint: stsEndPoint, + accessKey: client.accessKey, + secretKey: client.secretKey, + }) + + const aRoleConf = Object.assign({}, clientConfigParams, { credentialsProvider: assumeRoleProvider }) + + const assumeRoleClient = new minio.Client(aRoleConf) + assumeRoleClient.region = server_region + + describe('Put an Object', function () { + step( + `Put an object with assume role credentials: bucket:_bucketName:${bucketName}, _objectName:${objName}`, + (done) => { + const putObjPromise = assumeRoleClient.putObject(bucketName, objName, objContent) + putObjPromise + .then(() => { + done() + }) + .catch(done) + }, + ) + + step(`Remove an Object with assume role credentials:${bucketName}, _objectName:${objName}`, (done) => { + const removeObjPromise = assumeRoleClient.removeObject(bucketName, objName) + removeObjPromise + .then(() => { + done() + }) + .catch(done) + }) + }) + } + } catch (err) { + // eslint-disable-next-line no-console + console.error('Error in Assume Role API.', err) + } + }) + + describe('Put Object Response test with multipart on an Un versioned bucket:', () => { + const bucketToTestMultipart = 'minio-js-test-put-multiuv-' + uuid.v4() + + before((done) => client.makeBucket(bucketToTestMultipart, '', done)) + after((done) => 
client.removeBucket(bucketToTestMultipart, done)) + + // Non multipart Test + step( + `putObject(bucketName, objectName, stream)_bucketName:${bucketToTestMultipart}, _objectName:${_100kbObjectName}, stream:100KB`, + (done) => { + const stream = readableStream(_100kb) + client.putObject(bucketToTestMultipart, _100kbObjectName, stream, metaData, (e, res) => { + if (e) { + done(e) + } + if (res.versionId === null && res.etag) { + done() + } else { + done( + new Error( + `Incorrect response format, expected: {versionId:null, etag:"some-etag-hash"} received:${JSON.stringify( + res, + )}`, + ), + ) + } + }) + }, + ) + step( + `removeObject(bucketName, objectName, stream)_bucketName:${bucketToTestMultipart}, _objectName:${_100kbObjectName}`, + (done) => { + client + .removeObject(bucketToTestMultipart, _100kbObjectName) + .then(() => done()) + .catch(done) + }, + ) + + // Multipart Test + step( + `putObject(bucketName, objectName, stream)_bucketName:${bucketToTestMultipart}, _objectName:${_65mbObjectName}, stream:65MB`, + (done) => { + const stream = readableStream(_65mb) + client.putObject(bucketToTestMultipart, _65mbObjectName, stream, metaData, (e, res) => { + if (e) { + done(e) + } + if (res.versionId === null && res.etag) { + done() + } else { + done( + new Error( + `Incorrect response format, expected: {versionId:null, etag:"some-etag-hash"} received:${JSON.stringify( + res, + )}`, + ), + ) + } + }) + }, + ) + step( + `removeObject(bucketName, objectName, stream)_bucketName:${bucketToTestMultipart}, _objectName:${_65mbObjectName}`, + (done) => { + client + .removeObject(bucketToTestMultipart, _65mbObjectName) + .then(() => done()) + .catch(done) + }, + ) + }) + + describe('Put Object Response test with multipart on Versioned bucket:', () => { + const bucketToTestMultipart = 'minio-js-test-put-multiv-' + uuid.v4() + let isVersioningSupported = false + let versionedObjectRes = null + let versionedMultiPartObjectRes = null + + before((done) => + 
client.makeBucket(bucketToTestMultipart, '', () => { + client.setBucketVersioning(bucketToTestMultipart, { Status: 'Enabled' }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + isVersioningSupported = true + done() + }) + }), + ) + after((done) => client.removeBucket(bucketToTestMultipart, done)) + + // Non multipart Test + step( + `putObject(bucketName, objectName, stream)_bucketName:${bucketToTestMultipart}, _objectName:${_100kbObjectName}, stream:100KB`, + (done) => { + if (isVersioningSupported) { + const stream = readableStream(_100kb) + client.putObject(bucketToTestMultipart, _100kbObjectName, stream, metaData, (e, res) => { + if (e) { + done(e) + } + if (res.versionId && res.etag) { + versionedObjectRes = res + done() + } else { + done( + new Error( + `Incorrect response format, expected: {versionId:'some-version-hash', etag:"some-etag-hash"} received:${JSON.stringify( + res, + )}`, + ), + ) + } + }) + } else { + done() + } + }, + ) + step( + `removeObject(bucketName, objectName, stream)_bucketName:${bucketToTestMultipart}, _objectName:${_100kbObjectName}`, + (done) => { + if (isVersioningSupported) { + client + .removeObject(bucketToTestMultipart, _100kbObjectName, { versionId: versionedObjectRes.versionId }) + .then(() => done()) + .catch(done) + } else { + done() + } + }, + ) + + // Multipart Test + step( + `putObject(bucketName, objectName, stream)_bucketName:${bucketToTestMultipart}, _objectName:${_65mbObjectName}, stream:65MB`, + (done) => { + if (isVersioningSupported) { + const stream = readableStream(_65mb) + client.putObject(bucketToTestMultipart, _65mbObjectName, stream, metaData, (e, res) => { + if (e) { + done(e) + } + if (res.versionId && res.etag) { + versionedMultiPartObjectRes = res + done() + } else { + done( + new Error( + `Incorrect response format, expected: {versionId:null, etag:"some-etag-hash"} received:${JSON.stringify( + res, + )}`, + ), + ) + } + }) + } else { + 
done() + } + }, + ) + step( + `removeObject(bucketName, objectName, stream)_bucketName:${bucketToTestMultipart}, _objectName:${_65mbObjectName}`, + (done) => { + if (isVersioningSupported) { + client + .removeObject(bucketToTestMultipart, _65mbObjectName, { versionId: versionedMultiPartObjectRes.versionId }) + .then(() => done()) + .catch(done) + } else { + done() + } + }, + ) + }) + describe('Compose Object API Tests', () => { + /** + * Steps: + * 1. Generate a 100MB file in temp dir + * 2. Split into 26 MB parts in temp dir + * 3. Upload parts to bucket + * 4. Compose into a single object in the same bucket. + * 5. Remove the file parts (Clean up) + * 6. Remove the file itself (Clean up) + * 7. Remove bucket. (Clean up) + */ + + var _100mbFileToBeSplitAndComposed = Buffer.alloc(100 * 1024 * 1024, 0) + let composeObjectTestBucket = 'minio-js-test-compose-obj-' + uuid.v4() + before((done) => client.makeBucket(composeObjectTestBucket, '', done)) + after((done) => client.removeBucket(composeObjectTestBucket, done)) + + const composedObjName = '_100-mb-file-to-test-compose' + const tmpSubDir = `${tmpDir}/compose` + var fileToSplit = `${tmpSubDir}/${composedObjName}` + let partFilesNamesWithPath = [] + let partObjNameList = [] + let isSplitSuccess = false + step(`Create a local file of 100 MB and split `, (done) => { + try { + fs.writeFileSync(fileToSplit, _100mbFileToBeSplitAndComposed) + // 100 MB split into 26 MB part size. 
+ splitFile + .splitFileBySize(fileToSplit, 26 * 1024 * 1024) + .then((names) => { + partFilesNamesWithPath = names + isSplitSuccess = true + done() + }) + .catch(() => { + done() + }) + } catch (err) { + done() + } + }) + + step(`Upload parts to Bucket_bucketName:${composeObjectTestBucket}, _objectName:${partObjNameList}`, (done) => { + if (isSplitSuccess) { + const fileSysToBucket = partFilesNamesWithPath.map((partFileName) => { + const partObjName = partFileName.substr((tmpSubDir + '/').length) + partObjNameList.push(partObjName) + return client.fPutObject(composeObjectTestBucket, partObjName, partFileName, {}) + }) + + Promise.all(fileSysToBucket) + .then(() => { + done() + }) + .catch(done) + } else { + done() + } + }) + + step( + `composeObject(destObjConfig, sourceObjList, cb)::_bucketName:${composeObjectTestBucket}, _objectName:${composedObjName}`, + (done) => { + if (isSplitSuccess) { + const sourcePartObjList = partObjNameList.map((partObjName) => { + return new CopySourceOptions({ + Bucket: composeObjectTestBucket, + Object: partObjName, + }) + }) + + const destObjConfig = new CopyDestinationOptions({ + Bucket: composeObjectTestBucket, + Object: composedObjName, + }) + + client.composeObject(destObjConfig, sourcePartObjList).then((e) => { + if (e) { + return done(e) + } + done() + }) + } else { + done() + } + }, + ) + + step( + `statObject(bucketName, objectName, cb)::_bucketName:${composeObjectTestBucket}, _objectName:${composedObjName}`, + (done) => { + if (isSplitSuccess) { + client.statObject(composeObjectTestBucket, composedObjName, (e) => { + if (e) { + return done(e) + } + done() + }) + } else { + done() + } + }, + ) + + step( + `Remove Object Parts from Bucket::_bucketName:${composeObjectTestBucket}, _objectNames:${partObjNameList}`, + (done) => { + if (isSplitSuccess) { + const sourcePartObjList = partObjNameList.map((partObjName) => { + return client.removeObject(composeObjectTestBucket, partObjName) + }) + + Promise.all(sourcePartObjList) + 
.then(() => { + done() + }) + .catch(done) + } else { + done() + } + }, + ) + + step( + `Remove Composed target Object::_bucketName:${composeObjectTestBucket}, objectName:${composedObjName}`, + (done) => { + if (isSplitSuccess) { + client + .removeObject(composeObjectTestBucket, composedObjName) + .then(() => { + done() + }) + .catch(done) + } else { + done() + } + }, + ) + + step('Clean up temp directory part files', (done) => { + if (isSplitSuccess) { + removeDirAndFiles(tmpSubDir) + } + done() + }) + }) + + describe('Special Characters test on a prefix and an object', () => { + // Isolate the bucket/object for easy debugging and tracking. + const bucketNameForSpCharObjects = 'minio-js-test-obj-sppre' + uuid.v4() + before((done) => client.makeBucket(bucketNameForSpCharObjects, '', done)) + after((done) => client.removeBucket(bucketNameForSpCharObjects, done)) + + const specialCharPrefix = 'SpecialMenùäöüexPrefix/' + + let objectNameSpecialChars = "äöüex ®©µÄÆÐÕæŒƕƩDž 01000000 0x40 \u0040 amȡȹɆple&0a!-_.*'()&$@=;:+,?<>.pdf" + if (isWindowsPlatform) { + objectNameSpecialChars = "äöüex ®©µÄÆÐÕæŒƕƩDž 01000000 0x40 u0040 amȡȹɆple&0a!-_.'()&$@=;+,.pdf" + } + + const objectNameWithPrefix = `${specialCharPrefix}${objectNameSpecialChars}` + + const objectContents = Buffer.alloc(100 * 1024, 0) + + step( + `putObject(bucketName, objectName, stream)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameWithPrefix}, stream:100Kib`, + (done) => { + client + .putObject(bucketNameForSpCharObjects, objectNameWithPrefix, objectContents) + .then(() => { + done() + }) + .catch(done) + }, + ) + + step( + `listObjects(bucketName, prefix, recursive)_bucketName:${bucketNameForSpCharObjects}, prefix:"", false`, + (done) => { + const listStream = client.listObjects(bucketNameForSpCharObjects, '', false) + let listedObject = null + listStream.on('data', function (obj) { + listedObject = obj + }) + listStream.on('end', () => { + if (listedObject.prefix === specialCharPrefix) { + 
done() + } else { + return done(new Error(`Expected Prefix Name: ${specialCharPrefix}: received:${listedObject.prefix}`)) + } + }) + listStream.on('error', function (e) { + done(e) + }) + }, + ) + + step( + `listObjectsV2(bucketName, prefix, recursive)_bucketName:${bucketNameForSpCharObjects}, prefix:"", false`, + (done) => { + const listStream = client.listObjectsV2(bucketNameForSpCharObjects, '', false) + let listedObject = null + listStream.on('data', function (obj) { + listedObject = obj + }) + listStream.on('end', () => { + // verify that the prefix special characters are handled + if (listedObject.prefix === specialCharPrefix) { + done() + } else { + return done(new Error(`Expected object Name: ${specialCharPrefix}: received:${listedObject.prefix}`)) + } + }) + + listStream.on('error', function (e) { + done(e) + }) + }, + ) + + step( + `extensions.listObjectsV2WithMetadata(bucketName, prefix, recursive)_bucketName:${bucketNameForSpCharObjects}, prefix:"", false`, + (done) => { + const listStream = client.extensions.listObjectsV2WithMetadata(bucketNameForSpCharObjects, '', false) + let listedObject = null + listStream.on('data', function (obj) { + listedObject = obj + }) + listStream.on('end', () => { + if (listedObject.prefix === specialCharPrefix) { + done() + } else { + return done(new Error(`Expected object Name: ${specialCharPrefix}: received:${listedObject.prefix}`)) + } + }) + + listStream.on('error', function (e) { + done(e) + }) + }, + ) + + step( + `getObject(bucketName, objectName)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameWithPrefix}`, + (done) => { + client + .getObject(bucketNameForSpCharObjects, objectNameWithPrefix) + .then((stream) => { + stream.on('data', function () {}) + stream.on('end', done) + }) + .catch(done) + }, + ) + + step( + `statObject(bucketName, objectName, cb)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameWithPrefix}`, + (done) => { + client.statObject(bucketNameForSpCharObjects, 
objectNameWithPrefix, (e) => { + if (e) { + return done(e) + } + done() + }) + }, + ) + step( + `removeObject(bucketName, objectName)_bucketName:${objectNameWithPrefix}, _objectName:${objectNameWithPrefix}`, + (done) => { + client + .removeObject(bucketNameForSpCharObjects, objectNameWithPrefix) + .then(() => done()) + .catch(done) + }, + ) + }) + describe('Test listIncompleteUploads (Multipart listing) with special characters', () => { + const specialCharPrefix = 'SpecialMenùäöüexPrefix/' + const objectNameSpecialChars = 'äöüex.pdf' + const spObjWithPrefix = `${specialCharPrefix}${objectNameSpecialChars}` + const spBucketName = 'minio-js-test-lin-sppre' + uuid.v4() + + before((done) => client.makeBucket(spBucketName, '', done)) + after((done) => client.removeBucket(spBucketName, done)) + + step( + `initiateNewMultipartUpload(bucketName, objectName, metaData, cb)_bucketName:${spBucketName}, objectName:${spObjWithPrefix}, metaData:${metaData}`, + (done) => { + client.initiateNewMultipartUpload(spBucketName, spObjWithPrefix, metaData, done) + }, + ) + + step( + `listIncompleteUploads(bucketName, prefix, recursive)_bucketName:${spBucketName}, prefix:${spObjWithPrefix}, recursive: true_`, + function (done) { + // MinIO's ListIncompleteUploads returns an empty list, so skip this on non-AWS. + let hostSkipList = ['s3.amazonaws.com'] + if (!hostSkipList.includes(client.host)) { + done() + return + } + + var found = false + client + .listIncompleteUploads(spBucketName, spObjWithPrefix, true) + .on('error', (e) => done(e)) + .on('data', (data) => { + if (data.key === spObjWithPrefix) { + found = true + } + }) + .on('end', () => { + if (found) { + return done() + } + done(new Error(`${spObjWithPrefix} not found during listIncompleteUploads`)) + }) + }, + ) + + step( + `listIncompleteUploads(bucketName, prefix, recursive)_bucketName:${spBucketName}, recursive: true_`, + function (done) { + // MinIO's ListIncompleteUploads returns an empty list, so skip this on non-AWS. 
+ let hostSkipList = ['s3.amazonaws.com'] + if (!hostSkipList.includes(client.host)) { + done() + return + } + + var found = false + client + .listIncompleteUploads(spBucketName, '', false) + .on('error', (e) => done(e)) + .on('data', (data) => { + // check the prefix + if (data.prefix === specialCharPrefix) { + found = true + } + }) + .on('end', () => { + if (found) { + return done() + } + done(new Error(`${specialCharPrefix} not found during listIncompleteUploads`)) + }) + }, + ) + step( + `removeIncompleteUploads(bucketName, prefix)_bucketName:${spBucketName}, prefix:${spObjWithPrefix}_`, + (done) => { + client.removeIncompleteUpload(spBucketName, spObjWithPrefix).then(done).catch(done) + }, + ) + }) + describe('Select Object content API Test', function () { + const selObjContentBucket = 'minio-js-test-sel-object-' + uuid.v4() + const selObject = 'SelectObjectContent' + // Isolate the bucket/object for easy debugging and tracking. + before((done) => client.makeBucket(selObjContentBucket, '', done)) + after((done) => client.removeBucket(selObjContentBucket, done)) + + step( + `putObject(bucketName, objectName, stream)_bucketName:${selObjContentBucket}, objectName:${selObject}, stream:csv`, + (done) => { + // Save a CSV file so that we can query later to test the results. 
+ client + .putObject( + selObjContentBucket, + selObject, + 'Name,PhoneNumber,City,Occupation\n' + + 'Sam,(949) 123-45567,Irvine,Solutions Architect\n' + + 'Vinod,(949) 123-4556,Los Angeles,Solutions Architect\n' + + 'Jeff,(949) 123-45567,Seattle,AWS Evangelist\n' + + 'Jane,(949) 123-45567,Chicago,Developer\n' + + 'Sean,(949) 123-45567,Chicago,Developer\n' + + 'Mary,(949) 123-45567,Chicago,Developer\n' + + 'Kate,(949) 123-45567,Chicago,Developer', + {}, + ) + .then(() => { + done() + }) + .catch(done) + }, + ) + + step( + `selectObjectContent(bucketName, objectName, selectOpts)_bucketName:${selObjContentBucket}, objectName:${selObject}`, + (done) => { + const selectOpts = { + expression: 'SELECT * FROM s3object s where s."Name" = \'Jane\'', + expressionType: 'SQL', + inputSerialization: { + CSV: { FileHeaderInfo: 'Use', RecordDelimiter: '\n', FieldDelimiter: ',' }, + CompressionType: 'NONE', + }, + outputSerialization: { CSV: { RecordDelimiter: '\n', FieldDelimiter: ',' } }, + requestProgress: { Enabled: true }, + } + + client + .selectObjectContent(selObjContentBucket, selObject, selectOpts) + .then((result) => { + // verify the select query result string. + if (result.getRecords().toString() === 'Jane,(949) 123-45567,Chicago,Developer\n') { + // \n for csv line ending. + done() + } else { + return done( + new Error( + `Expected Result did not match received:${result + .getRecords() + .toString()} expected:"Jane,(949) 123-45567,Chicago,Developer\n"`, + ), + ) + } + }) + .catch(done) + }, + ) + + step(`Remove Object post select of content:_bucketName:${selObjContentBucket},objectName:${selObject}`, (done) => { + client + .removeObject(selObjContentBucket, selObject) + .then(() => done()) + .catch(done) + }) + }) + + describe('Force Deletion of objects with versions', function () { + // Isolate the bucket/object for easy debugging and tracking. 
+ const fdWithVerBucket = 'minio-js-fd-version-' + uuid.v4() + const fdObjectName = 'datafile-100-kB' + const fdObject = dataDir ? fs.readFileSync(dataDir + '/' + fdObjectName) : Buffer.alloc(100 * 1024, 0) + + before((done) => client.makeBucket(fdWithVerBucket, '', done)) + after((done) => client.removeBucket(fdWithVerBucket, done)) + + describe('Test for force removal of multiple versions', function () { + let isVersioningSupported = false + const objVersionList = [] + step( + `setBucketVersioning(bucketName, versionConfig):_bucketName:${fdWithVerBucket},versionConfig:{Status:"Enabled"} `, + (done) => { + client.setBucketVersioning(fdWithVerBucket, { Status: 'Enabled' }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + isVersioningSupported = true + done() + }) + }, + ) + + step( + `putObject(bucketName, objectName, stream)_bucketName:${fdWithVerBucket}, objectName:${fdObjectName}, stream:100Kib_`, + (done) => { + if (isVersioningSupported) { + client + .putObject(fdWithVerBucket, fdObjectName, fdObject) + .then(() => done()) + .catch(done) + } else { + done() + } + }, + ) + // Put two versions of the same object. 
+ step( + `putObject(bucketName, objectName, stream)_bucketName:${fdWithVerBucket}, objectName:${fdObjectName}, stream:100Kib_`, + (done) => { + if (isVersioningSupported) { + client + .putObject(fdWithVerBucket, fdObjectName, fdObject) + .then(() => done()) + .catch(done) + } else { + done() + } + }, + ) + + step( + `removeObject(bucketName, objectList, removeOpts)_bucketName:${fdWithVerBucket}_Remove ${objVersionList.length} objects`, + (done) => { + if (isVersioningSupported) { + client.removeObject(fdWithVerBucket, fdObjectName, { forceDelete: true }, () => { + done() + }) + } else { + done() + } + }, + ) + + step( + `listObjects(bucketName, prefix, recursive)_bucketName:${fdWithVerBucket}, prefix: '', recursive:true_`, + (done) => { + if (isVersioningSupported) { + client + .listObjects(fdWithVerBucket, '', true, { IncludeVersion: true }) + .on('error', done) + .on('end', () => { + if (_.isEqual(0, objVersionList.length)) { + return done() + } + return done(new Error(`listObjects lists ${objVersionList.length} objects, expected 0`)) + }) + .on('data', (data) => { + objVersionList.push(data) + }) + } else { + done() + } + }, + ) + }) + }) + + describe('Force Deletion of prefix with versions', function () { + // Isolate the bucket/object for easy debugging and tracking. + const fdPrefixBucketName = 'minio-js-fd-version-' + uuid.v4() + const fdPrefixObjName = 'my-prefix/datafile-100-kB' + const fdPrefixObject = dataDir ? 
fs.readFileSync(dataDir + '/' + fdPrefixObjName) : Buffer.alloc(100 * 1024, 0) + + before((done) => client.makeBucket(fdPrefixBucketName, '', done)) + after((done) => client.removeBucket(fdPrefixBucketName, done)) + + describe('Test for removal of multiple versions', function () { + let isVersioningSupported = false + const objVersionList = [] + step( + `setBucketVersioning(bucketName, versionConfig):_bucketName:${fdPrefixBucketName},versionConfig:{Status:"Enabled"} `, + (done) => { + client.setBucketVersioning(fdPrefixBucketName, { Status: 'Enabled' }, (err) => { + if (err && err.code === 'NotImplemented') { + return done() + } + if (err) { + return done(err) + } + isVersioningSupported = true + done() + }) + }, + ) + + step( + `putObject(bucketName, objectName, stream)_bucketName:${fdPrefixBucketName}, objectName:${fdPrefixObjName}, stream:100Kib_`, + (done) => { + if (isVersioningSupported) { + client + .putObject(fdPrefixBucketName, fdPrefixObjName, fdPrefixObject) + .then(() => done()) + .catch(done) + } else { + done() + } + }, + ) + // Put two versions of the same object. 
+ step( + `putObject(bucketName, objectName, stream)_bucketName:${fdPrefixBucketName}, objectName:${fdPrefixObjName}, stream:100Kib_`, + (done) => { + if (isVersioningSupported) { + client + .putObject(fdPrefixBucketName, fdPrefixObjName, fdPrefixObject) + .then(() => done()) + .catch(done) + } else { + done() + } + }, + ) + + step( + `removeObject(bucketName, objectList, removeOpts)_bucketName:${fdPrefixBucketName}_Remove ${objVersionList.length} objects`, + (done) => { + if (isVersioningSupported) { + client.removeObject(fdPrefixBucketName, 'my-prefix/', { forceDelete: true }, () => { + done() + }) + } else { + done() + } + }, + ) + + step( + `listObjects(bucketName, prefix, recursive)_bucketName:${fdPrefixBucketName}, prefix: '', recursive:true_`, + (done) => { + if (isVersioningSupported) { + client + .listObjects(fdPrefixBucketName, '/my-prefix', true, { IncludeVersion: true }) + .on('error', done) + .on('end', () => { + if (_.isEqual(0, objVersionList.length)) { + return done() + } + return done(new Error(`listObjects lists ${objVersionList.length} objects, expected 0`)) + }) + .on('data', (data) => { + objVersionList.push(data) + }) + } else { + done() + } + }, + ) + }) + }) + + describe('Force Deletion of objects without versions', function () { + // Isolate the bucket/object for easy debugging and tracking. + const versionedBucketName = 'minio-js-fd-nv-' + uuid.v4() + const versioned_100kbObjectName = 'datafile-100-kB' + const versioned_100kb_Object = dataDir + ? 
fs.readFileSync(dataDir + '/' + versioned_100kbObjectName) + : Buffer.alloc(100 * 1024, 0) + + before((done) => client.makeBucket(versionedBucketName, '', done)) + after((done) => client.removeBucket(versionedBucketName, done)) + + describe('Test force removal of an object', function () { + step( + `putObject(bucketName, objectName, stream)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}, stream:100Kib_`, + (done) => { + client + .putObject(versionedBucketName, versioned_100kbObjectName, versioned_100kb_Object) + .then(() => done()) + .catch(done) + }, + ) + + step( + `removeObject(bucketName, objectList, removeOpts)_bucketName:${versionedBucketName}_Remove 1 object`, + (done) => { + client.removeObject(versionedBucketName, versioned_100kbObjectName, { forceDelete: true }, () => { + done() + }) + }, + ) + + step( + `listObjects(bucketName, prefix, recursive)_bucketName:${versionedBucketName}, prefix: '', recursive:true_`, + (done) => { + let objVersionList = [] + client + .listObjects(versionedBucketName, '', true, {}) + .on('error', done) + .on('end', () => { + if (_.isEqual(0, objVersionList.length)) { + return done() + } + return done(new Error(`listObjects lists ${objVersionList.length} objects, expected 0`)) + }) + .on('data', (data) => { + objVersionList.push(data) + }) + }, + ) + }) + }) + + describe('Force Deletion of prefix', function () { + // Isolate the bucket/object for easy debugging and tracking. + const fdPrefixBucket = 'minio-js-fd-nv-' + uuid.v4() + const fdObjectName = 'my-prefix/datafile-100-kB' + const fdObject = dataDir ? 
fs.readFileSync(dataDir + '/' + fdObjectName) : Buffer.alloc(100 * 1024, 0) + + before((done) => client.makeBucket(fdPrefixBucket, '', done)) + after((done) => client.removeBucket(fdPrefixBucket, done)) + + describe('Test force removal of a prefix', function () { + step( + `putObject(bucketName, objectName, stream)_bucketName:${fdPrefixBucket}, objectName:${fdObjectName}, stream:100Kib_`, + (done) => { + client + .putObject(fdPrefixBucket, fdObjectName, fdObject) + .then(() => done()) + .catch(done) + }, + ) + + step(`removeObject(bucketName, objectList, removeOpts)_bucketName:${fdPrefixBucket}_Remove 1 object`, (done) => { + client.removeObject(fdPrefixBucket, '/my-prefix', { forceDelete: true }, () => { + done() + }) + }) + + step( + `listObjects(bucketName, prefix, recursive)_bucketName:${fdPrefixBucket}, prefix: 'my-prefix', recursive:true_`, + (done) => { + let objList = [] + client + .listObjects(fdPrefixBucket, 'my-prefix', true, {}) + .on('error', done) + .on('end', () => { + if (_.isEqual(0, objList.length)) { + return done() + } + return done(new Error(`listObjects lists ${objList.length} objects, expected 0`)) + }) + .on('data', (data) => { + objList.push(data) + }) + }, + ) + }) + }) +}) diff --git a/tests/unit/test.js b/tests/unit/test.js new file mode 100644 index 00000000..ceaf4256 --- /dev/null +++ b/tests/unit/test.js @@ -0,0 +1,2107 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2016 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import * as Stream from 'node:stream' + +import { assert } from 'chai' +import Nock from 'nock' + +import { + calculateEvenSplits, + CopyDestinationOptions, + CopySourceOptions, + isArray, + isValidEndpoint, + isValidIP, + makeDateLong, + makeDateShort, + partsRequired, +} from '../../src/helpers.js' +import * as Minio from '../../src/minio.js' + +const Package = { version: 'development' } + +describe('Helpers', () => { + it('should validate for s3 endpoint', () => { + assert.equal(isValidEndpoint('s3.amazonaws.com'), true) + }) + it('should validate for s3 china', () => { + assert.equal(isValidEndpoint('s3.cn-north-1.amazonaws.com.cn'), true) + }) + it('should validate for us-west-2', () => { + assert.equal(isValidEndpoint('s3-us-west-2.amazonaws.com'), true) + }) + it('should fail for invalid endpoint characters', () => { + assert.equal(isValidEndpoint('111.#2.11'), false) + }) + + it('should make date short', () => { + let date = new Date('2012-12-03T17:25:36.331Z') + + assert.equal(makeDateShort(date), '20121203') + }) + it('should make date long', () => { + let date = new Date('2017-08-11T17:26:34.935Z') + + assert.equal(makeDateLong(date), '20170811T172634Z') + }) + + // Adopted from minio-go sdk + const oneGB = 1024 * 1024 * 1024 + const fiveGB = 5 * oneGB + + const OBJ_SIZES = { + gb1: oneGB, + gb5: fiveGB, + gb5p1: fiveGB + 1, + gb10p1: 2 * fiveGB + 1, + gb10p2: 2 * fiveGB + 2, + } + + const maxMultipartPutObjectSize = 1024 * 1024 * 1024 * 1024 * 5 + + it('Parts Required Test cases ', () => { + const expectedPartsRequiredTestCases = [ + { value: 0, expected: 0 }, + { value: 1, expected: 1 }, + { value: fiveGB, expected: 10 }, + { value: OBJ_SIZES.gb5p1, expected: 10 }, + { value: 2 * fiveGB, expected: 20 }, + { value: OBJ_SIZES.gb10p1, expected: 20 }, + { value: OBJ_SIZES.gb10p2, expected: 20 }, + { value: OBJ_SIZES.gb10p1 + OBJ_SIZES.gb10p2, 
expected: 40 }, + { value: maxMultipartPutObjectSize, expected: 10000 }, + ] + + expectedPartsRequiredTestCases.forEach((testCase) => { + const fnResult = partsRequired(testCase.value) + assert.equal(fnResult, testCase.expected) + }) + }) + it('Even split of Sizes Test cases ', () => { + // Adopted from minio-go sdk + const expectedSplitsTestCases = [ + { size: 0, sourceConfig: new CopySourceOptions({ Start: -1 }), expectedStart: null, expectedEnd: null }, + { size: 1, sourceConfig: new CopySourceOptions({ Start: -1 }), expectedStart: [undefined], expectedEnd: [NaN] }, + { size: 1, sourceConfig: new CopySourceOptions({ Start: 0 }), expectedStart: [0], expectedEnd: [0] }, + { + size: OBJ_SIZES.gb1, + sourceConfig: new CopySourceOptions({ Start: -1 }), + expectedStart: [0, 536870912], + expectedEnd: [536870911, 1073741823], + }, + { + size: OBJ_SIZES.gb5, + sourceConfig: new CopySourceOptions({ Start: -1 }), + expectedStart: [ + 0, 536870912, 1073741824, 1610612736, 2147483648, 2684354560, 3221225472, 3758096384, 4294967296, 4831838208, + ], + expectedEnd: [ + 536870911, 1073741823, 1610612735, 2147483647, 2684354559, 3221225471, 3758096383, 4294967295, 4831838207, + 5368709119, + ], + }, + + // 2 part splits + { + size: OBJ_SIZES.gb5p1, + sourceConfig: new CopySourceOptions({ Start: -1 }), + expectedStart: [ + 0, 536870913, 1073741825, 1610612737, 2147483649, 2684354561, 3221225473, 3758096385, 4294967297, 4831838209, + ], + expectedEnd: [ + 536870912, 1073741824, 1610612736, 2147483648, 2684354560, 3221225472, 3758096384, 4294967296, 4831838208, + 5368709120, + ], + }, + { + size: OBJ_SIZES.gb5p1, + sourceConfig: new CopySourceOptions({ Start: -1 }), + expectedStart: [ + 0, 536870913, 1073741825, 1610612737, 2147483649, 2684354561, 3221225473, 3758096385, 4294967297, 4831838209, + ], + expectedEnd: [ + 536870912, 1073741824, 1610612736, 2147483648, 2684354560, 3221225472, 3758096384, 4294967296, 4831838208, + 5368709120, + ], + }, + + // 3 part splits + { + size: 
OBJ_SIZES.gb10p1, + sourceConfig: new CopySourceOptions({ Start: -1 }), + expectedStart: [ + 0, 536870913, 1073741825, 1610612737, 2147483649, 2684354561, 3221225473, 3758096385, 4294967297, 4831838209, + 5368709121, 5905580033, 6442450945, 6979321857, 7516192769, 8053063681, 8589934593, 9126805505, 9663676417, + 10200547329, + ], + expectedEnd: [ + 536870912, 1073741824, 1610612736, 2147483648, 2684354560, 3221225472, 3758096384, 4294967296, 4831838208, + 5368709120, 5905580032, 6442450944, 6979321856, 7516192768, 8053063680, 8589934592, 9126805504, 9663676416, + 10200547328, 10737418240, + ], + }, + { + size: OBJ_SIZES.gb10p2, + sourceConfig: new CopySourceOptions({ Start: -1 }), + expectedStart: [ + 0, 536870913, 1073741826, 1610612738, 2147483650, 2684354562, 3221225474, 3758096386, 4294967298, 4831838210, + 5368709122, 5905580034, 6442450946, 6979321858, 7516192770, 8053063682, 8589934594, 9126805506, 9663676418, + 10200547330, + ], + expectedEnd: [ + 536870912, 1073741825, 1610612737, 2147483649, 2684354561, 3221225473, 3758096385, 4294967297, 4831838209, + 5368709121, 5905580033, 6442450945, 6979321857, 7516192769, 8053063681, 8589934593, 9126805505, 9663676417, + 10200547329, 10737418241, + ], + }, + ] + + expectedSplitsTestCases.forEach((testCase) => { + const fnResult = calculateEvenSplits(testCase.size, testCase) + const { startIndex, endIndex } = fnResult || {} + + if (isArray(startIndex) && isArray(endIndex)) { + const isExpectedResult = + startIndex.length === testCase.expectedStart.length && endIndex.length === testCase.expectedEnd.length + assert.equal(isExpectedResult, true) + } else { + // null cases. 
+ assert.equal(startIndex, expectedSplitsTestCases.expectedStart) + assert.equal(endIndex, expectedSplitsTestCases.expectedEnd) + } + }) + }) +}) + +describe('CopyConditions', () => { + let date = 'Fri, 11 Aug 2017 19:34:18 GMT' + + let cc = new Minio.CopyConditions() + + describe('#setModified', () => { + it('should take a date argument', () => { + cc.setModified(new Date(date)) + + assert.equal(cc.modified, date) + }) + + it('should throw without date', () => { + assert.throws(() => { + cc.setModified() + }, /date must be of type Date/) + + assert.throws(() => { + cc.setModified({ hi: 'there' }) + }, /date must be of type Date/) + }) + }) + + describe('#setUnmodified', () => { + it('should take a date argument', () => { + cc.setUnmodified(new Date(date)) + + assert.equal(cc.unmodified, date) + }) + + it('should throw without date', () => { + assert.throws(() => { + cc.setUnmodified() + }, /date must be of type Date/) + + assert.throws(() => { + cc.setUnmodified({ hi: 'there' }) + }, /date must be of type Date/) + }) + }) +}) + +describe('Client', function () { + var nockRequests = [] + this.timeout(5000) + beforeEach(() => { + Nock.cleanAll() + nockRequests = [] + }) + afterEach(() => { + nockRequests.forEach((element) => { + if (!element.request.isDone()) { + element.request.done() + } + }) + }) + var client = new Minio.Client({ + endPoint: 'localhost', + port: 9000, + accessKey: 'accesskey', + secretKey: 'secretkey', + useSSL: false, + }) + describe('new client', () => { + it('should work with https', () => { + var client = new Minio.Client({ + endPoint: 'localhost', + accessKey: 'accesskey', + secretKey: 'secretkey', + }) + assert.equal(client.port, 443) + }) + it('should override port with http', () => { + var client = new Minio.Client({ + endPoint: 'localhost', + port: 9000, + accessKey: 'accesskey', + secretKey: 'secretkey', + useSSL: false, + }) + assert.equal(client.port, 9000) + }) + it('should work with http', () => { + var client = new Minio.Client({ + 
endPoint: 'localhost', + accessKey: 'accesskey', + secretKey: 'secretkey', + useSSL: false, + }) + assert.equal(client.port, 80) + }) + it('should override port with https', () => { + var client = new Minio.Client({ + endPoint: 'localhost', + port: 9000, + accessKey: 'accesskey', + secretKey: 'secretkey', + }) + assert.equal(client.port, 9000) + }) + it('should fail with url', (done) => { + try { + new Minio.Client({ + endPoint: 'http://localhost:9000', + accessKey: 'accesskey', + secretKey: 'secretkey', + }) + } catch (e) { + done() + } + }) + it('should fail with alphanumeric', (done) => { + try { + new Minio.Client({ + endPoint: 'localhost##$@3', + accessKey: 'accesskey', + secretKey: 'secretkey', + }) + } catch (e) { + done() + } + }) + it('should fail with no url', (done) => { + try { + new Minio.Client({ + accessKey: 'accesskey', + secretKey: 'secretkey', + }) + } catch (e) { + done() + } + }) + it('should fail with bad port', (done) => { + try { + new Minio.Client({ + endPoint: 'localhost', + port: -1, + accessKey: 'accesskey', + secretKey: 'secretkey', + }) + } catch (e) { + done() + } + }) + it('should fail when secure param is passed', (done) => { + try { + new Minio.Client({ + endPoint: 'localhost', + secure: false, + port: 9000, + accessKey: 'accesskey', + secretKey: 'secretkey', + }) + } catch (e) { + done() + } + }) + it('should fail when secure param is passed', (done) => { + try { + new Minio.Client({ + endPoint: 'localhost', + secure: true, + port: 9000, + accessKey: 'accesskey', + secretKey: 'secretkey', + }) + } catch (e) { + done() + } + }) + }) + describe('Presigned URL', () => { + describe('presigned-get', () => { + it('should not generate presigned url with no access key', (done) => { + try { + var client = new Minio.Client({ + endPoint: 'localhost', + port: 9000, + useSSL: false, + }) + client.presignedGetObject('bucket', 'object', 1000, function () {}) + } catch (e) { + done() + } + }) + it('should not generate presigned url with wrong 
expires param', (done) => { + try { + client.presignedGetObject('bucket', 'object', '0', function () {}) + } catch (e) { + done() + } + }) + }) + describe('presigned-put', () => { + it('should not generate presigned url with no access key', (done) => { + try { + var client = new Minio.Client({ + endPoint: 'localhost', + port: 9000, + useSSL: false, + }) + client.presignedPutObject('bucket', 'object', 1000, function () {}) + } catch (e) { + done() + } + }) + it('should not generate presigned url with wrong expires param', (done) => { + try { + client.presignedPutObject('bucket', 'object', '0', function () {}) + } catch (e) { + done() + } + }) + }) + describe('presigned-post-policy', () => { + it('should not generate content type for undefined value', () => { + assert.throws(() => { + var policy = client.newPostPolicy() + policy.setContentType() + }, /content-type cannot be null/) + }) + it('should not generate content disposition for undefined value', () => { + assert.throws(() => { + var policy = client.newPostPolicy() + policy.setContentDisposition() + }, /content-disposition cannot be null/) + }) + it('should not generate user defined metadata for string value', () => { + assert.throws(() => { + var policy = client.newPostPolicy() + policy.setUserMetaData('123') + }, /metadata should be of type "object"/) + }) + it('should not generate user defined metadata for null value', () => { + assert.throws(() => { + var policy = client.newPostPolicy() + policy.setUserMetaData(null) + }, /metadata should be of type "object"/) + }) + it('should not generate user defined metadata for undefined value', () => { + assert.throws(() => { + var policy = client.newPostPolicy() + policy.setUserMetaData() + }, /metadata should be of type "object"/) + }) + }) + }) + describe('User Agent', () => { + it('should have a default user agent', () => { + var client = new Minio.Client({ + endPoint: 'localhost', + accessKey: 'accesskey', + secretKey: 'secretkey', + }) + assert.equal(`MinIO 
(${process.platform}; ${process.arch}) minio-js/${Package.version}`, client.userAgent) + }) + it('should set user agent', () => { + var client = new Minio.Client({ + endPoint: 'localhost', + accessKey: 'accesskey', + secretKey: 'secretkey', + }) + client.setAppInfo('test', '3.2.1') + assert.equal( + `MinIO (${process.platform}; ${process.arch}) minio-js/${Package.version} test/3.2.1`, + client.userAgent, + ) + }) + it('should set user agent without comments', () => { + var client = new Minio.Client({ + endPoint: 'localhost', + accessKey: 'accesskey', + secretKey: 'secretkey', + }) + client.setAppInfo('test', '3.2.1') + assert.equal( + `MinIO (${process.platform}; ${process.arch}) minio-js/${Package.version} test/3.2.1`, + client.userAgent, + ) + }) + it('should not set user agent without name', (done) => { + try { + var client = new Minio.Client({ + endPoint: 'localhost', + accessKey: 'accesskey', + secretKey: 'secretkey', + }) + client.setAppInfo(null, '3.2.1') + } catch (e) { + done() + } + }) + it('should not set user agent with empty name', (done) => { + try { + var client = new Minio.Client({ + endPoint: 'localhost', + accessKey: 'accesskey', + secretKey: 'secretkey', + }) + client.setAppInfo('', '3.2.1') + } catch (e) { + done() + } + }) + it('should not set user agent without version', (done) => { + try { + var client = new Minio.Client({ + endPoint: 'localhost', + accessKey: 'accesskey', + secretKey: 'secretkey', + }) + client.setAppInfo('test', null) + } catch (e) { + done() + } + }) + it('should not set user agent with empty version', (done) => { + try { + var client = new Minio.Client({ + endPoint: 'localhost', + accessKey: 'accesskey', + secretKey: 'secretkey', + }) + client.setAppInfo('test', '') + } catch (e) { + done() + } + }) + }) + + describe('object level', () => { + describe('#getObject(bucket, object, callback)', () => { + it('should fail on null bucket', (done) => { + try { + client.getObject(null, 'hello', function () {}) + } catch (e) { + 
done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.getObject('', 'hello', function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.getObject(' \n \t ', 'hello', function () {}) + } catch (e) { + done() + } + }) + it('should fail on null object', (done) => { + try { + client.getObject('hello', null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty object', (done) => { + try { + client.getObject('hello', '', function () {}) + } catch (e) { + done() + } + }) + }) + + describe('#putObject(bucket, object, source, size, contentType, callback)', () => { + describe('with small objects using single put', () => { + it('should fail when data is smaller than specified', (done) => { + var s = new Stream.Readable() + s._read = function () {} + s.push('hello world') + s.push(null) + client.putObject('bucket', 'object', s, 12, '', (e) => { + if (e) { + done() + } + }) + }) + it('should fail when data is larger than specified', (done) => { + var s = new Stream.Readable() + s._read = function () {} + s.push('hello world') + s.push(null) + client.putObject('bucket', 'object', s, 10, '', (e) => { + if (e) { + done() + } + }) + }) + it('should fail with invalid bucket name', () => { + assert.throws(() => { + client.putObject('ab', 'object', () => {}) + }, /Invalid bucket name/) + }) + it('should fail with invalid object name', () => { + assert.throws(() => { + client.putObject('bucket', '', () => {}) + }, /Invalid object name/) + }) + it('should error with size > maxObjectSize', () => { + assert.throws(() => { + client.putObject('bucket', 'object', new Stream.Readable(), client.maxObjectSize + 1, () => {}) + }, /size should not be more than/) + }) + it('should fail on null bucket', (done) => { + try { + client.putObject(null, 'hello', null, 1, '', function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.putObject(' 
\n \t ', 'hello', null, 1, '', function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.putObject('', 'hello', null, 1, '', function () {}) + } catch (e) { + done() + } + }) + it('should fail on null object', (done) => { + try { + client.putObject('hello', null, null, 1, '', function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty object', (done) => { + try { + client.putObject('hello', '', null, 1, '', function () {}) + } catch (e) { + done() + } + }) + }) + }) + + describe('#removeAllBucketNotification()', () => { + it('should error on invalid arguments', () => { + assert.throws(() => { + client.removeAllBucketNotification( + 'ab', + () => {}, + function () {}, + ) + }, /Invalid bucket name/) + }) + }) + + describe('#setBucketNotification()', () => { + it('should error on invalid arguments', () => { + assert.throws(() => { + client.setBucketNotification('ab', () => {}) + }, /Invalid bucket name/) + assert.throws(() => { + client.setBucketNotification('bucket', 49, () => {}) + }, /notification config should be of type "Object"/) + }) + }) + + describe('#getBucketNotification()', () => { + it('should error on invalid arguments', () => { + assert.throws(() => { + client.getBucketNotification('ab', () => {}) + }, /Invalid bucket name/) + }) + }) + + describe('#listenBucketNotification', () => { + it('should error on invalid arguments', () => { + assert.throws(() => { + client.listenBucketNotification('ab', 'prefix', 'suffix', ['events']) + }, /Invalid bucket name/) + assert.throws(() => { + client.listenBucketNotification('bucket', {}, 'suffix', ['events']) + }, /prefix must be of type string/) + assert.throws(() => { + client.listenBucketNotification('bucket', '', {}, ['events']) + }, /suffix must be of type string/) + assert.throws(() => { + client.listenBucketNotification('bucket', '', '', {}) + }, /events must be of type Array/) + }) + }) + + describe('#statObject(bucket, object, 
callback)', () => { + it('should fail on null bucket', (done) => { + try { + client.statObject(null, 'hello', function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.statObject('', 'hello', function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.statObject(' \n \t ', 'hello', function () {}) + } catch (e) { + done() + } + }) + it('should fail on null object', (done) => { + try { + client.statObject('hello', null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty object', (done) => { + try { + client.statObject('hello', '', function () {}) + } catch (e) { + done() + } + }) + + it('should fail on incompatible argument type (number) for statOpts object', (done) => { + try { + client.statObject('hello', 'testStatOpts', 1, function () {}) + } catch (e) { + done() + } + }) + it('should fail on incompatible argument type (null) for statOpts object', (done) => { + try { + client.statObject('hello', 'testStatOpts', null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on incompatible argument type (sting) for statOpts object', (done) => { + try { + client.statObject('hello', 'testStatOpts', ' ', function () {}) + } catch (e) { + done() + } + }) + }) + + describe('#removeObject(bucket, object, callback)', () => { + it('should fail on null bucket', (done) => { + try { + client.removeObject(null, 'hello', function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.removeObject('', 'hello', function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.removeObject(' \n \t ', 'hello', function () {}) + } catch (e) { + done() + } + }) + it('should fail on null object', (done) => { + try { + client.removeObject('hello', null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty object', 
(done) => { + try { + client.removeObject('hello', '', function () {}) + } catch (e) { + done() + } + }) + // Versioning related options as removeOpts + it('should fail on empty (null) removeOpts object', (done) => { + try { + client.removeObject('hello', 'testRemoveOpts', null, function () {}) + } catch (e) { + done() + } + }) + + it('should fail on empty (string) removeOpts', (done) => { + try { + client.removeObject('hello', 'testRemoveOpts', '', function () {}) + } catch (e) { + done() + } + }) + }) + + describe('#removeIncompleteUpload(bucket, object, callback)', () => { + it('should fail on null bucket', (done) => { + try { + client.removeIncompleteUpload(null, 'hello', function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.removeIncompleteUpload('', 'hello', function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.removeIncompleteUpload(' \n \t ', 'hello', function () {}) + } catch (e) { + done() + } + }) + it('should fail on null object', (done) => { + try { + client.removeIncompleteUpload('hello', null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty object', (done) => { + try { + client.removeIncompleteUpload('hello', '', function () {}) + } catch (e) { + done() + } + }) + }) + }) + + describe('Bucket Versioning APIs', () => { + describe('getBucketVersioning(bucket, callback)', () => { + it('should fail on null bucket', (done) => { + try { + client.getBucketVersioning(null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.getBucketVersioning('', function () {}) + } catch (e) { + done() + } + }) + }) + + describe('setBucketVersioning(bucket, versionConfig, callback)', () => { + it('should fail on null bucket', (done) => { + try { + client.setBucketVersioning(null, {}, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty 
bucket', (done) => { + try { + client.setBucketVersioning('', {}, function () {}) + } catch (e) { + done() + } + }) + + it('should fail on empty versionConfig', (done) => { + try { + client.setBucketVersioning('', null, function () {}) + } catch (e) { + done() + } + }) + }) + }) + + describe('Bucket and Object Tags APIs', () => { + describe('Set Bucket Tags ', () => { + it('should fail on null bucket', (done) => { + try { + client.setBucketTagging(null, {}, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.setBucketTagging('', {}, function () {}) + } catch (e) { + done() + } + }) + it('should fail if tags are more than 50', (done) => { + const _50_plus_key_tags = {} + for (let i = 0; i < 51; i += 1) { + _50_plus_key_tags[i] = i + } + try { + client.setBucketTagging('', _50_plus_key_tags, function () {}) + } catch (e) { + done() + } + }) + }) + describe('Get Bucket Tags', () => { + it('should fail on invalid bucket', (done) => { + try { + client.getBucketTagging('nv', null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on null bucket', (done) => { + try { + client.getBucketTagging(null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.getBucketTagging('', function () {}) + } catch (e) { + done() + } + }) + }) + describe('Remove Bucket Tags', () => { + it('should fail on null object', (done) => { + try { + client.removeBucketTagging(null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.removeBucketTagging('', function () {}) + } catch (e) { + done() + } + }) + it('should fail on invalid bucket name', (done) => { + try { + client.removeBucketTagging('198.51.100.24', function () {}) + } catch (e) { + done() + } + }) + + it('should fail on invalid bucket name', (done) => { + try { + client.removeBucketTagging('xy', function () {}) + } catch (e) { + 
done() + } + }) + }) + describe('Put Object Tags', () => { + it('should fail on null object', (done) => { + try { + client.putObjectTagging('my-bucket-name', null, {}, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty object', (done) => { + try { + client.putObjectTagging('my-bucket-name', null, {}, function () {}) + } catch (e) { + done() + } + }) + it('should fail on non object tags', (done) => { + try { + client.putObjectTagging('my-bucket-name', null, 'non-obj-tag', function () {}) + } catch (e) { + done() + } + }) + it('should fail if tags are more than 50 on an object', (done) => { + const _50_plus_key_tags = {} + for (let i = 0; i < 51; i += 1) { + _50_plus_key_tags[i] = i + } + try { + client.putObjectTagging('my-bucket-name', null, _50_plus_key_tags, function () {}) + } catch (e) { + done() + } + }) + }) + describe('Get Object Tags', () => { + it('should fail on invalid bucket', (done) => { + try { + client.getObjectTagging('nv', null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on null object', (done) => { + try { + client.getObjectTagging('my-bucket-name', null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty object', (done) => { + try { + client.getObjectTagging('my-bucket-name', null, function () {}) + } catch (e) { + done() + } + }) + }) + describe('Remove Object Tags', () => { + it('should fail on null object', (done) => { + try { + client.removeObjectTagging('my-bucket', null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.removeObjectTagging('my-bucket', '', function () {}) + } catch (e) { + done() + } + }) + it('should fail on invalid bucket name', (done) => { + try { + client.removeObjectTagging('198.51.100.24', function () {}) + } catch (e) { + done() + } + }) + + it('should fail on invalid bucket name', (done) => { + try { + client.removeObjectTagging('xy', function () {}) + } catch (e) { + done() + 
} + }) + }) + }) + + describe('setBucketLifecycle(bucket, lifecycleConfig, callback)', () => { + it('should fail on null bucket', (done) => { + try { + client.setBucketLifecycle(null, null, function () {}) + } catch (e) { + done() + } + }) + + it('should fail on empty bucket', (done) => { + try { + client.setBucketLifecycle('', null, function () {}) + } catch (e) { + done() + } + }) + }) + + describe('getBucketLifecycle(bucket, callback)', () => { + it('should fail on null bucket', (done) => { + try { + client.getBucketLifecycle(null, function () {}) + } catch (e) { + done() + } + }) + + it('should fail on empty bucket', (done) => { + try { + client.getBucketLifecycle('', function () {}) + } catch (e) { + done() + } + }) + }) + describe('removeBucketLifecycle(bucket, callback)', () => { + it('should fail on null bucket', (done) => { + try { + client.removeBucketLifecycle(null, null, function () {}) + } catch (e) { + done() + } + }) + + it('should fail on empty bucket', (done) => { + try { + client.removeBucketLifecycle('', null, function () {}) + } catch (e) { + done() + } + }) + }) + + describe('Object Locking APIs', () => { + describe('getObjectLockConfig(bucket, callback)', () => { + it('should fail on null bucket', (done) => { + try { + client.getObjectLockConfig(null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.getObjectLockConfig('', function () {}) + } catch (e) { + done() + } + }) + }) + + describe('setObjectLockConfig(bucket, lockConfig, callback)', () => { + it('should fail on null bucket', (done) => { + try { + client.setObjectLockConfig(null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.setObjectLockConfig('', function () {}) + } catch (e) { + done() + } + }) + it('should fail on passing invalid mode ', (done) => { + try { + client.setObjectLockConfig('my-bucket', { mode: 'invalid_mode' }, function () {}) + } catch 
(e) { + done() + } + }) + it('should fail on passing invalid unit ', (done) => { + try { + client.setObjectLockConfig('my-bucket', { mode: 'COMPLIANCE', unit: 'invalid_unit' }, function () {}) + } catch (e) { + done() + } + }) + it('should fail on passing invalid validity ', (done) => { + try { + client.setObjectLockConfig( + 'my-bucket', + { mode: 'COMPLIANCE', unit: 'invalid_unit', validity: '' }, + function () {}, + ) + } catch (e) { + done() + } + }) + it('should fail on passing invalid config ', (done) => { + try { + client.setObjectLockConfig( + 'my-bucket', + { mode: 'COMPLIANCE', randomProp: true, nonExisting: false }, + function () {}, + ) + } catch (e) { + done() + } + }) + }) + }) + + describe('Object retention APIs', () => { + describe('getObjectRetention(bucket, objectName, getRetentionOpts,callback)', () => { + it('should fail on null bucket', (done) => { + try { + client.getObjectRetention(null, '', '', function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.getObjectRetention('', '', '', function () {}) + } catch (e) { + done() + } + }) + it('should fail on invalid object name', (done) => { + try { + client.getObjectRetention('my-bucket', null, '', function () {}) + } catch (e) { + done() + } + }) + it('should fail on invalid versionId', (done) => { + try { + client.getObjectRetention('my-bucket', 'objectname', { versionId: 123 }, function () {}) + } catch (e) { + done() + } + }) + }) + + describe('putObjectRetention(bucket, objectName, retentionConfig, callback)', () => { + it('should fail on null bucket', (done) => { + try { + client.putObjectRetention(null, '', {}, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.putObjectRetention('', '', {}, function () {}) + } catch (e) { + done() + } + }) + + it('should fail on null object', (done) => { + try { + client.putObjectRetention('my-bucket', null, {}, function () {}) + } catch 
(e) { + done() + } + }) + it('should fail on empty object', (done) => { + try { + client.putObjectRetention('my-bucket', '', {}, function () {}) + } catch (e) { + done() + } + }) + it('should fail on passing invalid mode ', (done) => { + try { + client.putObjectRetention('my-bucket', 'my-object', { mode: 'invalid_mode' }, function () {}) + } catch (e) { + done() + } + }) + it('should fail on passing invalid governanceBypass ', (done) => { + try { + client.putObjectRetention('my-bucket', 'my-object', { governanceBypass: 'nonbool' }, function () {}) + } catch (e) { + done() + } + }) + it('should fail on passing invalid (null) retainUntilDate ', (done) => { + try { + client.putObjectRetention('my-bucket', 'my-object', { retainUntilDate: 12345 }, function () {}) + } catch (e) { + done() + } + }) + it('should fail on passing invalid versionId ', (done) => { + try { + client.putObjectRetention('my-bucket', { versionId: 'COMPLIANCE' }, function () {}) + } catch (e) { + done() + } + }) + }) + }) + + describe('Bucket Encryption APIs', () => { + describe('setBucketEncryption(bucket, encryptionConfig, callback)', () => { + it('should fail on null bucket', (done) => { + try { + client.setBucketEncryption(null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.setBucketEncryption('', function () {}) + } catch (e) { + done() + } + }) + it('should fail on multiple rules', (done) => { + try { + client.setBucketEncryption( + 'my-bucket', + { + // Default Rule + Rule: [ + { + ApplyServerSideEncryptionByDefault: { + SSEAlgorithm: 'AES256', + }, + }, + { + ApplyServerSideEncryptionByDefault: { + SSEAlgorithm: 'AES256', + }, + }, + ], + }, + function () {}, + ) + } catch (e) { + done() + } + }) + }) + + describe('getBucketEncryption(bucket, callback)', () => { + it('should fail on null bucket', (done) => { + try { + client.getBucketEncryption(null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on 
empty bucket', (done) => { + try { + client.getBucketEncryption('', function () {}) + } catch (e) { + done() + } + }) + }) + + describe('removeBucketEncryption(bucket, callback)', () => { + it('should fail on null bucket', (done) => { + try { + client.removeBucketEncryption(null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.removeBucketEncryption('', function () {}) + } catch (e) { + done() + } + }) + }) + }) + describe('Bucket Replication APIs', () => { + describe('setBucketReplication(bucketName, replicationConfig, callback)', () => { + it('should fail on null bucket', (done) => { + try { + client.setBucketReplication(null, {}, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.setBucketReplication('', {}, function () {}) + } catch (e) { + done() + } + }) + + it('should fail on empty replicationConfig', (done) => { + try { + client.setBucketReplication('my-bucket', {}, function () {}) + } catch (e) { + done() + } + }) + + it('should fail on empty replicationConfig role', (done) => { + try { + client.setBucketReplication('my-bucket', { role: '' }, function () {}) + } catch (e) { + done() + } + }) + + it('should fail on invalid value for replicationConfig role', (done) => { + try { + client.setBucketReplication('my-bucket', { role: 12 }, function () {}) + } catch (e) { + done() + } + }) + + it('should fail on empty value for replicationConfig rules', (done) => { + try { + client.setBucketReplication('my-bucket', { role: 'arn:', rules: [] }, function () {}) + } catch (e) { + done() + } + }) + it('should fail on null value for replicationConfig rules', (done) => { + try { + client.setBucketReplication('my-bucket', { role: 'arn:', rules: null }, function () {}) + } catch (e) { + done() + } + }) + }) + + describe('getBucketReplication(bucketName, callback)', () => { + it('should fail on null bucket', (done) => { + try { + 
client.getBucketReplication(null, {}, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.getBucketReplication('', {}, function () {}) + } catch (e) { + done() + } + }) + }) + + describe('removeBucketReplication(bucketName, callback)', () => { + it('should fail on null bucket', (done) => { + try { + client.removeBucketReplication(null, {}, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.removeBucketReplication('', {}, function () {}) + } catch (e) { + done() + } + }) + }) + }) + + describe('Object Legal Hold APIs', () => { + describe('getObjectLegalHold(bucketName, objectName, getOpts={}, cb)', () => { + it('should fail on null bucket', (done) => { + try { + client.getObjectLegalHold(null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.getObjectLegalHold('', function () {}) + } catch (e) { + done() + } + }) + + it('should fail on null objectName', (done) => { + try { + client.getObjectLegalHold('my-bucket', null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on null getOpts', (done) => { + try { + client.getObjectLegalHold('my-bucker', 'my-object', null, function () {}) + } catch (e) { + done() + } + }) + }) + + describe('setObjectLegalHold(bucketName, objectName, setOpts={}, cb)', () => { + it('should fail on null bucket', (done) => { + try { + client.setObjectLegalHold(null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.setObjectLegalHold('', function () {}) + } catch (e) { + done() + } + }) + + it('should fail on null objectName', (done) => { + try { + client.setObjectLegalHold('my-bucket', null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on null setOpts', (done) => { + try { + client.setObjectLegalHold('my-bucker', 'my-object', null, function () {}) + } 
catch (e) { + done() + } + }) + it('should fail on empty versionId', (done) => { + try { + client.setObjectLegalHold('my-bucker', 'my-object', {}, function () {}) + } catch (e) { + done() + } + }) + }) + }) + + describe('Compose Object APIs', () => { + describe('composeObject(destObjConfig, sourceObjectList,cb)', () => { + it('should fail on null destination config', (done) => { + try { + client.composeObject(null, function () {}) + } catch (e) { + done() + } + }) + + it('should fail on no array source config', (done) => { + try { + const destOptions = new CopyDestinationOptions({ Bucket: 'test-bucket', Object: 'test-object' }) + client.composeObject(destOptions, 'non-array', function () {}) + } catch (e) { + done() + } + }) + + it('should fail on null source config', (done) => { + try { + const destOptions = new CopyDestinationOptions({ Bucket: 'test-bucket', Object: 'test-object' }) + client.composeObject(destOptions, null, function () {}) + } catch (e) { + done() + } + }) + }) + }) + describe('Select Object Content APIs', () => { + describe('selectObjectContent(bucketName, objectName, selectOpts={}, cb)', () => { + it('should fail on null bucket', (done) => { + try { + client.selectObjectContent(null, function () {}) + } catch (e) { + done() + } + }) + it('should fail on empty bucket', (done) => { + try { + client.selectObjectContent('', function () {}) + } catch (e) { + done() + } + }) + + it('should fail on empty object', (done) => { + try { + client.selectObjectContent('my-bucket', '', function () {}) + } catch (e) { + done() + } + }) + it('should fail on null object', (done) => { + try { + client.selectObjectContent('my-bucket', null, function () {}) + } catch (e) { + done() + } + }) + }) + }) +}) + +describe('IP Address Validations', () => { + it('should validate for valid ip', () => { + assert.equal(isValidIP('1.1.1.1'), true) + }) + + it('Check list of IPV4 Invalid addresses', () => { + const invalidIpv4 = [ + ' 127.0.0.1', + '127.0.0.1 ', + '127.0.0.1 
127.0.0.1', + '127.0.0.256', + '127.0.0.1//1', + '127.0.0.1/0x1', + '127.0.0.1/-1', + '127.0.0.1/ab', + '127.0.0.1/', + '127.0.0.256/32', + '127.0.0.1/33', + ] + invalidIpv4.map((ip) => { + assert.equal(isValidIP(ip), false) + }) + }) + + it('Check list of IPV4 Valid addresses', () => { + const validIpv4 = ['001.002.003.004', '127.0.0.1', '255.255.255.255', '192.168.1.10'] + validIpv4.map((ip) => { + assert.equal(isValidIP(ip), true) + }) + }) + + it('Check list of IPV6 Invalid addresses', () => { + const invalidIpV6 = [ + "':10.0.0.1", + '-1', + '::1 ::1', + '1.2.3.4:1111:2222:3333:4444::5555', + '1.2.3.4:1111:2222:3333::5555', + '1.2.3.4:1111:2222::5555', + '1.2.3.4:1111::5555', + '1.2.3.4::', + '1.2.3.4::5555', + '11112222:3333:4444:5555:6666:1.2.3.4', + '11112222:3333:4444:5555:6666:7777:8888', + '::1//64', + '::1/0001', + '1111:', + '1111:1.2.3.4', + '1111:2222', + '1111:22223333:4444:5555:6666:1.2.3.4', + '1111:22223333:4444:5555:6666:7777:8888', + '1111:2222:', + '1111:2222:1.2.3.4', + '1111:2222:3333', + '1111:2222:33334444:5555:6666:1.2.3.4', + '1111:2222:33334444:5555:6666:7777:8888', + '1111:2222:3333:', + '1111:2222:3333:1.2.3.4', + '1111:2222:3333:4444', + '1111:2222:3333:44445555:6666:1.2.3.4', + '1111:2222:3333:44445555:6666:7777:8888', + '1111:2222:3333:4444:', + '1111:2222:3333:4444:1.2.3.4', + '1111:2222:3333:4444:5555', + '1111:2222:3333:4444:55556666:1.2.3.4', + '1111:2222:3333:4444:55556666:7777:8888', + '1111:2222:3333:4444:5555:', + '1111:2222:3333:4444:5555:1.2.3.4', + '1111:2222:3333:4444:5555:6666', + '1111:2222:3333:4444:5555:66661.2.3.4', + '1111:2222:3333:4444:5555:66667777:8888', + '1111:2222:3333:4444:5555:6666:', + '1111:2222:3333:4444:5555:6666:1.2.3.4.5', + '1111:2222:3333:4444:5555:6666:255.255.255255', + '1111:2222:3333:4444:5555:6666:255.255255.255', + '1111:2222:3333:4444:5555:6666:255255.255.255', + '1111:2222:3333:4444:5555:6666:256.256.256.256', + '1111:2222:3333:4444:5555:6666:7777', + 
'1111:2222:3333:4444:5555:6666:77778888', + '1111:2222:3333:4444:5555:6666:7777:', + '1111:2222:3333:4444:5555:6666:7777:1.2.3.4', + '1111:2222:3333:4444:5555:6666:7777:::', + '1111:2222:3333:4444:5555:6666::8888:', + '1111:2222:3333:4444:5555:6666:::', + '1111:2222:3333:4444:5555:6666:::8888', + '1111:2222:3333:4444:5555::7777:8888:', + '1111:2222:3333:4444:5555::7777::', + '1111:2222:3333:4444:5555::8888:', + '1111:2222:3333:4444:5555:::', + '1111:2222:3333:4444:5555:::1.2.3.4', + '1111:2222:3333:4444:5555:::7777:8888', + '1111:2222:3333:4444::5555:', + '1111:2222:3333:4444::6666:7777:8888:', + '1111:2222:3333:4444::6666:7777::', + '1111:2222:3333:4444::6666::8888', + '1111:2222:3333:4444::7777:8888:', + '1111:2222:3333:4444::8888:', + '1111:2222:3333:4444:::', + '1111:2222:3333:4444:::6666:1.2.3.4', + '1111:2222:3333:4444:::6666:7777:8888', + '1111:2222:3333::5555:', + '1111:2222:3333::5555:6666:7777:8888:', + '1111:2222:3333::5555:6666:7777::', + '1111:2222:3333::5555:6666::8888', + '1111:2222:3333::5555::1.2.3.4', + '1111:2222:3333::5555::7777:8888', + '1111:2222:3333::6666:7777:8888:', + '1111:2222:3333::7777:8888:', + '1111:2222:3333::8888:', + '1111:2222:3333:::', + '1111:2222:3333:::5555:6666:1.2.3.4', + '1111:2222:3333:::5555:6666:7777:8888', + '1111:2222::4444:5555:6666:7777:8888:', + '1111:2222::4444:5555:6666:7777::', + '1111:2222::4444:5555:6666::8888', + '1111:2222::4444:5555::1.2.3.4', + '1111:2222::4444:5555::7777:8888', + '1111:2222::4444::6666:1.2.3.4', + '1111:2222::4444::6666:7777:8888', + '1111:2222::5555:', + '1111:2222::5555:6666:7777:8888:', + '1111:2222::6666:7777:8888:', + '1111:2222::7777:8888:', + '1111:2222::8888:', + '1111:2222:::', + '1111:2222:::4444:5555:6666:1.2.3.4', + '1111:2222:::4444:5555:6666:7777:8888', + '1111::3333:4444:5555:6666:7777:8888:', + '1111::3333:4444:5555:6666:7777::', + '1111::3333:4444:5555:6666::8888', + '1111::3333:4444:5555::1.2.3.4', + '1111::3333:4444:5555::7777:8888', + '1111::3333:4444::6666:1.2.3.4', + 
'1111::3333:4444::6666:7777:8888', + '1111::3333::5555:6666:1.2.3.4', + '1111::3333::5555:6666:7777:8888', + '1111::4444:5555:6666:7777:8888:', + '1111::5555:', + '1111::5555:6666:7777:8888:', + '1111::6666:7777:8888:', + '1111::7777:8888:', + '1111::8888:', + '1111:::', + '1111:::3333:4444:5555:6666:1.2.3.4', + '1111:::3333:4444:5555:6666:7777:8888', + '12345::6:7:8', + '124.15.6.89/60', + '1:2:3:4:5:6:7:8:9', + '1:2:3::4:5:6:7:8:9', + '1:2:3::4:5::7:8', + '1::1.2.256.4', + '1::1.2.3.256', + '1::1.2.3.300', + '1::1.2.3.900', + '1::1.2.300.4', + '1::1.2.900.4', + '1::1.256.3.4', + '1::1.300.3.4', + '1::1.900.3.4', + '1::256.2.3.4', + '1::260.2.3.4', + '1::2::3', + '1::300.2.3.4', + '1::300.300.300.300', + '1::3000.30.30.30', + '1::400.2.3.4', + '1::5:1.2.256.4', + '1::5:1.2.3.256', + '1::5:1.2.3.300', + '1::5:1.2.3.900', + '1::5:1.2.300.4', + '1::5:1.2.900.4', + '1::5:1.256.3.4', + '1::5:1.300.3.4', + '1::5:1.900.3.4', + '1::5:256.2.3.4', + '1::5:260.2.3.4', + '1::5:300.2.3.4', + '1::5:300.300.300.300', + '1::5:3000.30.30.30', + '1::5:400.2.3.4', + '1::5:900.2.3.4', + '1::900.2.3.4', + '1:::3:4:5', + '2001:0000:1234: 0000:0000:C1C0:ABCD:0876', + '2001:0000:1234:0000:0000:C1C0:ABCD:0876 0', + '2001:1:1:1:1:1:255Z255X255Y255', + '2001::FFD3::57ab', + '2001:DB8:0:0:8:800:200C:417A:221', + '2001:db8:85a3::8a2e:37023:7334', + '2001:db8:85a3::8a2e:370k:7334', + '3ffe:0b00:0000:0001:0000:0000:000a', + '3ffe:b00::1::a', + ':', + ':1.2.3.4', + ':1111:2222:3333:4444:5555:6666:1.2.3.4', + ':1111:2222:3333:4444:5555:6666:7777:8888', + ':1111:2222:3333:4444:5555:6666:7777::', + ':1111:2222:3333:4444:5555:6666::', + ':1111:2222:3333:4444:5555:6666::8888', + ':1111:2222:3333:4444:5555::', + ':1111:2222:3333:4444:5555::1.2.3.4', + ':1111:2222:3333:4444:5555::7777:8888', + ':1111:2222:3333:4444:5555::8888', + ':1111:2222:3333:4444::', + ':1111:2222:3333:4444::1.2.3.4', + ':1111:2222:3333:4444::5555', + ':1111:2222:3333:4444::6666:1.2.3.4', + ':1111:2222:3333:4444::6666:7777:8888', 
+ ':1111:2222:3333:4444::7777:8888', + ':1111:2222:3333:4444::8888', + ':1111:2222:3333::', + ':1111:2222:3333::1.2.3.4', + ':1111:2222:3333::5555', + ':1111:2222:3333::5555:6666:1.2.3.4', + ':1111:2222:3333::5555:6666:7777:8888', + ':1111:2222:3333::6666:1.2.3.4', + ':1111:2222:3333::6666:7777:8888', + ':1111:2222:3333::7777:8888', + ':1111:2222:3333::8888', + ':1111:2222::', + ':1111:2222::1.2.3.4', + ':1111:2222::4444:5555:6666:1.2.3.4', + ':1111:2222::4444:5555:6666:7777:8888', + ':1111:2222::5555', + ':1111:2222::5555:6666:1.2.3.4', + ':1111:2222::5555:6666:7777:8888', + ':1111:2222::6666:1.2.3.4', + ':1111:2222::6666:7777:8888', + ':1111:2222::7777:8888', + ':1111:2222::8888', + ':1111::', + ':1111::1.2.3.4', + ':1111::3333:4444:5555:6666:1.2.3.4', + ':1111::3333:4444:5555:6666:7777:8888', + ':1111::4444:5555:6666:1.2.3.4', + ':1111::4444:5555:6666:7777:8888', + ':1111::5555', + ':1111::5555:6666:1.2.3.4', + ':1111::5555:6666:7777:8888', + ':1111::6666:1.2.3.4', + ':1111::6666:7777:8888', + ':1111::7777:8888', + ':1111::8888', + ':2222:3333:4444:5555:6666:1.2.3.4', + ':2222:3333:4444:5555:6666:7777:8888', + ':3333:4444:5555:6666:1.2.3.4', + ':3333:4444:5555:6666:7777:8888', + ':4444:5555:6666:1.2.3.4', + ':4444:5555:6666:7777:8888', + ':5555:6666:1.2.3.4', + ':5555:6666:7777:8888', + ':6666:1.2.3.4', + ':6666:7777:8888', + ':7777:8888', + ':8888', + '::-1', + '::.', + '::..', + '::...', + '::...4', + '::..3.', + '::..3.4', + '::.2..', + '::.2.3.', + '::.2.3.4', + '::1...', + '::1.2..', + '::1.2.256.4', + '::1.2.3.', + '::1.2.3.256', + '::1.2.3.300', + '::1.2.3.900', + '::1.2.300.4', + '::1.2.900.4', + '::1.256.3.4', + '::1.300.3.4', + '::1.900.3.4', + '::1111:2222:3333:4444:5555:6666::', + '::2222:3333:4444:5555:6666:7777:8888:', + '::2222:3333:4444:5555:7777:8888::', + '::2222:3333:4444:5555:7777::8888', + '::2222:3333:4444:5555::1.2.3.4', + '::2222:3333:4444:5555::7777:8888', + '::2222:3333:4444::6666:1.2.3.4', + '::2222:3333:4444::6666:7777:8888', + 
'::2222:3333::5555:6666:1.2.3.4', + '::2222:3333::5555:6666:7777:8888', + '::2222::4444:5555:6666:1.2.3.4', + '::2222::4444:5555:6666:7777:8888', + '::256.2.3.4', + '::260.2.3.4', + '::300.2.3.4', + '::300.300.300.300', + '::3000.30.30.30', + '::3333:4444:5555:6666:7777:8888:', + '::400.2.3.4', + '::4444:5555:6666:7777:8888:', + '::5555:', + '::5555:6666:7777:8888:', + '::6666:7777:8888:', + '::7777:8888:', + '::8888:', + '::900.2.3.4', + ':::', + ':::1.2.3.4', + ':::2222:3333:4444:5555:6666:1.2.3.4', + ':::2222:3333:4444:5555:6666:7777:8888', + ':::3333:4444:5555:6666:7777:8888', + ':::4444:5555:6666:1.2.3.4', + ':::4444:5555:6666:7777:8888', + ':::5555', + ':::5555:6666:1.2.3.4', + ':::5555:6666:7777:8888', + ':::6666:1.2.3.4', + ':::6666:7777:8888', + ':::7777:8888', + ':::8888', + '::ffff:192x168.1.26', + '::ffff:2.3.4', + '::ffff:257.1.2.3', + 'FF01::101::2', + 'FF02:0000:0000:0000:0000:0000:0000:0000:0001', + 'XXXX:XXXX:XXXX:XXXX:XXXX:XXXX:1.2.3.4', + 'XXXX:XXXX:XXXX:XXXX:XXXX:XXXX:XXXX:XXXX', + 'a::b::c', + 'a::g', + 'a:a:a:a:a:a:a:a:a', + 'a:aaaaa::', + 'a:b', + 'a:b:c:d:e:f:g:0', + 'ffff:', + 'ffff::ffff::ffff', + 'ffgg:ffff:ffff:ffff:ffff:ffff:ffff:ffff', + 'ldkfj', + '::/129', + '1000:://32', + '::/', + ] + invalidIpV6.map((ip) => { + const valid = isValidIP(ip) + assert.equal(valid, false) + }) + }) + + it('Check list of IPV6 Valid addresses', () => { + const validIpv6 = [ + '0000:0000:0000:0000:0000:0000:0000:0000', + '0000:0000:0000:0000:0000:0000:0000:0001', + '0:0:0:0:0:0:0:0', + '0:0:0:0:0:0:0:1', + '0:0:0:0:0:0:0::', + '0:0:0:0:0:0:13.1.68.3', + '0:0:0:0:0:0::', + '0:0:0:0:0::', + '0:0:0:0:0:FFFF:129.144.52.38', + '0:0:0:0:1:0:0:0', + '0:0:0:0::', + '0:0:0::', + '0:0::', + '0:1:2:3:4:5:6:7', + '0::', + '0:a:b:c:d:e:f::', + '1080:0:0:0:8:800:200c:417a', + '1080::8:800:200c:417a', + '1111:2222:3333:4444:5555:6666:123.123.123.123', + '1111:2222:3333:4444:5555:6666:7777:8888', + '1111:2222:3333:4444:5555:6666:7777::', + 
'1111:2222:3333:4444:5555:6666::', + '1111:2222:3333:4444:5555:6666::8888', + '1111:2222:3333:4444:5555::', + '1111:2222:3333:4444:5555::7777:8888', + '1111:2222:3333:4444:5555::8888', + '1111:2222:3333:4444::', + '1111:2222:3333:4444::6666:123.123.123.123', + '1111:2222:3333:4444::6666:7777:8888', + '1111:2222:3333:4444::7777:8888', + '1111:2222:3333:4444::8888', + '1111:2222:3333::', + '1111:2222:3333::5555:6666:123.123.123.123', + '1111:2222:3333::5555:6666:7777:8888', + '1111:2222:3333::6666:123.123.123.123', + '1111:2222:3333::6666:7777:8888', + '1111:2222:3333::7777:8888', + '1111:2222:3333::8888', + '1111:2222::', + '1111:2222::4444:5555:6666:123.123.123.123', + '1111:2222::4444:5555:6666:7777:8888', + '1111:2222::5555:6666:123.123.123.123', + '1111:2222::5555:6666:7777:8888', + '1111:2222::6666:123.123.123.123', + '1111:2222::6666:7777:8888', + '1111:2222::7777:8888', + '1111:2222::8888', + '1111::', + '1111::3333:4444:5555:6666:123.123.123.123', + '1111::3333:4444:5555:6666:7777:8888', + '1111::4444:5555:6666:123.123.123.123', + '1111::4444:5555:6666:7777:8888', + '1111::5555:6666:123.123.123.123', + '1111::5555:6666:7777:8888', + '1111::6666:123.123.123.123', + '1111::6666:7777:8888', + '1111::7777:8888', + '1111::8888', + '1:2:3:4:5:6:1.2.3.4', + '1:2:3:4:5:6:7:8', + '1:2:3:4:5:6::', + '1:2:3:4:5:6::8', + '1:2:3:4:5::', + '1:2:3:4:5::7:8', + '1:2:3:4:5::8', + '1:2:3:4::', + '1:2:3:4::5:1.2.3.4', + '1:2:3:4::7:8', + '1:2:3:4::8', + '1:2:3::', + '1:2:3::5:1.2.3.4', + '1:2:3::7:8', + '1:2:3::8', + '1:2::', + '1:2::5:1.2.3.4', + '1:2::7:8', + '1:2::8', + '1::', + '1::2:3', + '1::2:3:4', + '1::2:3:4:5', + '1::2:3:4:5:6', + '1::2:3:4:5:6:7', + '1::5:1.2.3.4', + '1::5:11.22.33.44', + '1::7:8', + '1::8', + '2001:0000:1234:0000:0000:C1C0:ABCD:0876', + '2001:0000:4136:e378:8000:63bf:3fff:fdd2', + '2001:0db8:0000:0000:0000:0000:1428:57ab', + '2001:0db8:0000:0000:0000::1428:57ab', + '2001:0db8:0:0:0:0:1428:57ab', + '2001:0db8:0:0::1428:57ab', + 
'2001:0db8:1234:0000:0000:0000:0000:0000', + '2001:0db8:1234::', + '2001:0db8:1234:ffff:ffff:ffff:ffff:ffff', + '2001:0db8:85a3:0000:0000:8a2e:0370:7334', + '2001:0db8::1428:57ab', + '2001::CE49:7601:2CAD:DFFF:7C94:FFFE', + '2001::CE49:7601:E866:EFFF:62C3:FFFE', + '2001:DB8:0:0:8:800:200C:417A', + '2001:DB8::8:800:200C:417A', + '2001:db8:85a3:0:0:8a2e:370:7334', + '2001:db8:85a3::8a2e:370:7334', + '2001:db8::', + '2001:db8::1428:57ab', + '2001:db8:a::123', + '2002::', + '2608::3:5', + '2608:af09:30:0:0:0:0:134', + '2608:af09:30::102a:7b91:c239:baff', + '2::10', + '3ffe:0b00:0000:0000:0001:0000:0000:000a', + '7:6:5:4:3:2:1:0', + '::', + '::0', + '::0:0', + '::0:0:0', + '::0:0:0:0', + '::0:0:0:0:0', + '::0:0:0:0:0:0', + '::0:0:0:0:0:0:0', + '::0:a:b:c:d:e:f', + '::1', + '::123.123.123.123', + '::13.1.68.3', + '::2222:3333:4444:5555:6666:123.123.123.123', + '::2222:3333:4444:5555:6666:7777:8888', + '::2:3', + '::2:3:4', + '::2:3:4:5', + '::2:3:4:5:6', + '::2:3:4:5:6:7', + '::2:3:4:5:6:7:8', + '::3333:4444:5555:6666:7777:8888', + '::4444:5555:6666:123.123.123.123', + '::4444:5555:6666:7777:8888', + '::5555:6666:123.123.123.123', + '::5555:6666:7777:8888', + '::6666:123.123.123.123', + '::6666:7777:8888', + '::7777:8888', + '::8', + '::8888', + '::FFFF:129.144.52.38', + '::ffff:0:0', + '::ffff:0c22:384e', + '::ffff:12.34.56.78', + '::ffff:192.0.2.128', + '::ffff:192.168.1.1', + '::ffff:192.168.1.26', + '::ffff:c000:280', + 'FF01:0:0:0:0:0:0:101', + 'FF01::101', + 'FF02:0000:0000:0000:0000:0000:0000:0001', + 'a:b:c:d:e:f:0::', + 'fe80:0000:0000:0000:0204:61ff:fe9d:f156', + 'fe80:0:0:0:204:61ff:254.157.241.86', + 'fe80:0:0:0:204:61ff:fe9d:f156', + 'fe80::', + 'fe80::1', + 'fe80::204:61ff:254.157.241.86', + 'fe80::204:61ff:fe9d:f156', + 'fe80::217:f2ff:254.7.237.98', + 'fe80::217:f2ff:fe07:ed62', + 'fedc:ba98:7654:3210:fedc:ba98:7654:3210', + 'ff02::1', + 'ffff::', + 'ffff::3:5', + 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', + 'a:0::0:b', + 'a:0:0::0:b', + 'a:0::0:0:b', + 
'a::0:0:b', + 'a::0:b', + 'a:0::b', + 'a:0:0::b', + ] + validIpv6.map((ip) => { + const valid = isValidIP(ip) + assert.equal(valid, true) + }) + }) +}) diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 00000000..bba8d185 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "target": "ESNext", + "moduleResolution": "Node", + "module": "ESNext", + "strict": false, + "noImplicitAny": false, + "strictNullChecks": true, + "allowSyntheticDefaultImports": true, + "esModuleInterop": true, + "allowImportingTsExtensions": true, + "declaration": true, + "declarationMap": true, + "emitDeclarationOnly": true, + "sourceMap": true, + "pretty": true, + "outDir": "./dist/main/", + "rootDir": "./src" + } +} diff --git a/types/minio.d.ts b/types/minio.d.ts new file mode 100644 index 00000000..dbd85e74 --- /dev/null +++ b/types/minio.d.ts @@ -0,0 +1,775 @@ +// imported from https://github.com/DefinitelyTyped/DefinitelyTyped/blob/93cfb0ec069731dcdfc31464788613f7cddb8192/types/minio/index.d.ts + +import { EventEmitter } from 'node:events' +import type { RequestOptions } from 'node:https' +import type { Readable as ReadableStream } from 'node:stream' + +// Exports only from typings +export type Region = + | 'us-east-1' + | 'us-west-1' + | 'us-west-2' + | 'eu-west-1' + | 'eu-central-1' + | 'ap-southeast-1' + | 'ap-northeast-1' + | 'ap-southeast-2' + | 'sa-east-1' + | 'cn-north-1' + | string +export type NotificationEvent = + | 's3:ObjectCreated:*' + | 's3:ObjectCreated:Put' + | 's3:ObjectCreated:Post' + | 's3:ObjectCreated:Copy' + | 's3:ObjectCreated:CompleteMultipartUpload' + | 's3:ObjectRemoved:*' + | 's3:ObjectRemoved:Delete' + | 's3:ObjectRemoved:DeleteMarkerCreated' + | 's3:ReducedRedundancyLostObject' + | 's3:TestEvent' + | 's3:ObjectRestore:Post' + | 's3:ObjectRestore:Completed' + | 's3:Replication:OperationFailedReplication' + | 's3:Replication:OperationMissedThreshold' + | 's3:Replication:OperationReplicatedAfterThreshold' + | 
's3:Replication:OperationNotTracked' + | string +export type Mode = 'COMPLIANCE' | 'GOVERNANCE' +export type LockUnit = 'Days' | 'Years' +export type LegalHoldStatus = 'ON' | 'OFF' +export type NoResultCallback = (error: Error | null) => void +export type ResultCallback = (error: Error | null, result: T) => void +export type VersioningConfig = Record +export type TagList = Record +export type EmptyObject = Record +export type VersionIdentificator = Pick +export type Lifecycle = LifecycleConfig | null | '' +export type Lock = LockConfig | EmptyObject +export type Encryption = EncryptionConfig | EmptyObject +export type Retention = RetentionOptions | EmptyObject +export type IsoDate = string + +export interface ClientOptions { + endPoint: string + accessKey: string + secretKey: string + useSSL?: boolean | undefined + port?: number | undefined + region?: Region | undefined + transport?: any + sessionToken?: string | undefined + partSize?: number | undefined + pathStyle?: boolean | undefined +} + +export interface BucketItemFromList { + name: string + creationDate: Date +} + +export interface BucketItemCopy { + etag: string + lastModified: Date +} + +export interface BucketItem { + name: string + prefix: string + size: number + etag: string + lastModified: Date +} + +export interface BucketItemWithMetadata extends BucketItem { + metadata: ItemBucketMetadata | ItemBucketMetadataList +} + +export interface BucketItemStat { + size: number + etag: string + lastModified: Date + metaData: ItemBucketMetadata +} + +export interface IncompleteUploadedBucketItem { + key: string + uploadId: string + size: number +} + +export interface BucketStream extends ReadableStream { + on(event: 'data', listener: (item: T) => void): this + + on(event: 'end' | 'pause' | 'readable' | 'resume' | 'close', listener: () => void): this + + on(event: 'error', listener: (err: Error) => void): this + + on(event: string | symbol, listener: (...args: any[]) => void): this +} + +export interface 
PostPolicyResult { + postURL: string + formData: { + [key: string]: any + } +} + +export interface MetadataItem { + Key: string + Value: string +} + +export interface ItemBucketMetadataList { + Items: MetadataItem[] +} + +export interface ItemBucketMetadata { + [key: string]: any +} + +export interface UploadedObjectInfo { + etag: string + versionId: string | null +} + +export interface Tag { + Key: string + Value: string +} + +export interface LifecycleConfig { + Rule: LifecycleRule[] +} + +export interface LifecycleRule { + [key: string]: any +} + +export interface LockConfig { + mode: Mode + unit: LockUnit + validity: number +} + +export interface EncryptionConfig { + Rule: EncryptionRule[] +} + +export interface EncryptionRule { + [key: string]: any +} + +export interface ReplicationConfig { + role: string + rules: [] +} + +export interface ReplicationConfig { + [key: string]: any +} + +export interface RetentionOptions { + versionId: string + mode?: Mode + retainUntilDate?: IsoDate + governanceBypass?: boolean +} + +export interface LegalHoldOptions { + versionId: string + status: LegalHoldStatus +} + +export interface InputSerialization { + CompressionType?: 'NONE' | 'GZIP' | 'BZIP2' + CSV?: { + AllowQuotedRecordDelimiter?: boolean + Comments?: string + FieldDelimiter?: string + FileHeaderInfo?: 'NONE' | 'IGNORE' | 'USE' + QuoteCharacter?: string + QuoteEscapeCharacter?: string + RecordDelimiter?: string + } + JSON?: { + Type: 'DOCUMENT' | 'LINES' + } + Parquet?: EmptyObject +} + +export interface OutputSerialization { + CSV?: { + FieldDelimiter?: string + QuoteCharacter?: string + QuoteEscapeCharacter?: string + QuoteFields?: string + RecordDelimiter?: string + } + JSON?: { + RecordDelimiter?: string + } +} + +export interface SelectOptions { + expression: string + expressionType?: string + inputSerialization: InputSerialization + outputSerialization: OutputSerialization + requestProgress?: { Enabled: boolean } + scanRange?: { Start: number; End: number } +} 
+ +export interface SourceObjectStats { + size: number + metaData: string + lastModicied: Date + versionId: string + etag: string +} + +// No need to export this. But without it - linter error. +export class TargetConfig { + setId(id: any): void + + addEvent(newEvent: any): void + + addFilterSuffix(suffix: any): void + + addFilterPrefix(prefix: any): void +} + +export interface MakeBucketOpt { + ObjectLocking: boolean +} + +export interface RemoveOptions { + versionId?: string + governanceBypass?: boolean +} + +// Exports from library +export class Client { + constructor(options: ClientOptions) + + // Bucket operations + makeBucket(bucketName: string, region: Region, makeOpts: MakeBucketOpt, callback: NoResultCallback): void + makeBucket(bucketName: string, region: Region, callback: NoResultCallback): void + makeBucket(bucketName: string, callback: NoResultCallback): void + makeBucket(bucketName: string, region?: Region, makeOpts?: MakeBucketOpt): Promise + + listBuckets(callback: ResultCallback): void + listBuckets(): Promise + + bucketExists(bucketName: string, callback: ResultCallback): void + bucketExists(bucketName: string): Promise + + removeBucket(bucketName: string, callback: NoResultCallback): void + removeBucket(bucketName: string): Promise + + listObjects(bucketName: string, prefix?: string, recursive?: boolean): BucketStream + + listObjectsV2(bucketName: string, prefix?: string, recursive?: boolean, startAfter?: string): BucketStream + + listIncompleteUploads( + bucketName: string, + prefix?: string, + recursive?: boolean, + ): BucketStream + + getBucketVersioning(bucketName: string, callback: ResultCallback): void + getBucketVersioning(bucketName: string): Promise + + setBucketVersioning(bucketName: string, versioningConfig: any, callback: NoResultCallback): void + setBucketVersioning(bucketName: string, versioningConfig: any): Promise + + getBucketTagging(bucketName: string, callback: ResultCallback): void + getBucketTagging(bucketName: string): 
Promise + + setBucketTagging(bucketName: string, tags: TagList, callback: NoResultCallback): void + setBucketTagging(bucketName: string, tags: TagList): Promise + + removeBucketTagging(bucketName: string, callback: NoResultCallback): void + removeBucketTagging(bucketName: string): Promise + + setBucketLifecycle(bucketName: string, lifecycleConfig: Lifecycle, callback: NoResultCallback): void + setBucketLifecycle(bucketName: string, lifecycleConfig: Lifecycle): Promise + + getBucketLifecycle(bucketName: string, callback: ResultCallback): void + getBucketLifecycle(bucketName: string): Promise + + removeBucketLifecycle(bucketName: string, callback: NoResultCallback): void + removeBucketLifecycle(bucketName: string): Promise + + setObjectLockConfig(bucketName: string, callback: NoResultCallback): void + setObjectLockConfig(bucketName: string, lockConfig: Lock, callback: NoResultCallback): void + setObjectLockConfig(bucketName: string, lockConfig?: Lock): Promise + + getObjectLockConfig(bucketName: string, callback: ResultCallback): void + getObjectLockConfig(bucketName: string): Promise + + getBucketEncryption(bucketName: string, callback: ResultCallback): void + getBucketEncryption(bucketName: string): Promise + + setBucketEncryption(bucketName: string, encryptionConfig: Encryption, callback: NoResultCallback): void + setBucketEncryption(bucketName: string, encryptionConfig: Encryption): Promise + + removeBucketEncryption(bucketName: string, callback: NoResultCallback): void + removeBucketEncryption(bucketName: string): Promise + + setBucketReplication(bucketName: string, replicationConfig: ReplicationConfig, callback: NoResultCallback): void + setBucketReplication(bucketName: string, replicationConfig: ReplicationConfig): Promise + + getBucketReplication(bucketName: string, callback: ResultCallback): void + getBucketReplication(bucketName: string): Promise + + removeBucketReplication(bucketName: string, callback: NoResultCallback): void + 
removeBucketReplication(bucketName: string): Promise + + // Object operations + getObject(bucketName: string, objectName: string, callback: ResultCallback): void + getObject(bucketName: string, objectName: string): Promise + + getPartialObject( + bucketName: string, + objectName: string, + offset: number, + callback: ResultCallback, + ): void + getPartialObject( + bucketName: string, + objectName: string, + offset: number, + length: number, + callback: ResultCallback, + ): void + getPartialObject(bucketName: string, objectName: string, offset: number, length?: number): Promise + + fGetObject(bucketName: string, objectName: string, filePath: string, callback: NoResultCallback): void + fGetObject(bucketName: string, objectName: string, filePath: string): Promise + + putObject( + bucketName: string, + objectName: string, + stream: ReadableStream | Buffer | string, + callback: ResultCallback, + ): void + putObject( + bucketName: string, + objectName: string, + stream: ReadableStream | Buffer | string, + size: number, + callback: ResultCallback, + ): void + putObject( + bucketName: string, + objectName: string, + stream: ReadableStream | Buffer | string, + size: number, + metaData: ItemBucketMetadata, + callback: ResultCallback, + ): void + putObject( + bucketName: string, + objectName: string, + stream: ReadableStream | Buffer | string, + size?: number, + metaData?: ItemBucketMetadata, + ): Promise + putObject( + bucketName: string, + objectName: string, + stream: ReadableStream | Buffer | string, + metaData?: ItemBucketMetadata, + ): Promise + + fPutObject( + bucketName: string, + objectName: string, + filePath: string, + metaData: ItemBucketMetadata, + callback: ResultCallback, + ): void + fPutObject( + bucketName: string, + objectName: string, + filePath: string, + metaData?: ItemBucketMetadata, + ): Promise + + copyObject( + bucketName: string, + objectName: string, + sourceObject: string, + conditions: CopyConditions, + callback: ResultCallback, + ): void + 
copyObject( + bucketName: string, + objectName: string, + sourceObject: string, + conditions: CopyConditions, + ): Promise + + statObject(bucketName: string, objectName: string, callback: ResultCallback): void + statObject(bucketName: string, objectName: string): Promise + + removeObject(bucketName: string, objectName: string, removeOpts: RemoveOptions, callback: NoResultCallback): void + removeObject(bucketName: string, objectName: string, callback: NoResultCallback): void + removeObject(bucketName: string, objectName: string, removeOpts?: RemoveOptions): Promise + + removeObjects(bucketName: string, objectsList: string[], callback: NoResultCallback): void + removeObjects(bucketName: string, objectsList: string[]): Promise + + removeIncompleteUpload(bucketName: string, objectName: string, callback: NoResultCallback): void + removeIncompleteUpload(bucketName: string, objectName: string): Promise + + putObjectRetention(bucketName: string, objectName: string, callback: NoResultCallback): void + putObjectRetention( + bucketName: string, + objectName: string, + retentionOptions: Retention, + callback: NoResultCallback, + ): void + putObjectRetention(bucketName: string, objectName: string, retentionOptions?: Retention): Promise + + getObjectRetention( + bucketName: string, + objectName: string, + options: VersionIdentificator, + callback: ResultCallback, + ): void + getObjectRetention(bucketName: string, objectName: string, options: VersionIdentificator): Promise + + // It seems, putObjectTagging is deprecated in favor or setObjectTagging - there is no such a method in the library source code + /** + * @deprecated Use setObjectTagging instead. + */ + putObjectTagging(bucketName: string, objectName: string, tags: TagList, callback: NoResultCallback): void + /** + * @deprecated Use setObjectTagging instead. 
+ */ + putObjectTagging( + bucketName: string, + objectName: string, + tags: TagList, + putOptions: VersionIdentificator, + callback: NoResultCallback, + ): void + /** + * @deprecated Use setObjectTagging instead. + */ + putObjectTagging( + bucketName: string, + objectName: string, + tags: TagList, + putOptions?: VersionIdentificator, + ): Promise + + setObjectTagging(bucketName: string, objectName: string, tags: TagList, callback: NoResultCallback): void + setObjectTagging( + bucketName: string, + objectName: string, + tags: TagList, + putOptions: VersionIdentificator, + callback: NoResultCallback, + ): void + setObjectTagging( + bucketName: string, + objectName: string, + tags: TagList, + putOptions?: VersionIdentificator, + ): Promise + + removeObjectTagging(bucketName: string, objectName: string, callback: NoResultCallback): void + removeObjectTagging( + bucketName: string, + objectName: string, + removeOptions: VersionIdentificator, + callback: NoResultCallback, + ): void + removeObjectTagging(bucketName: string, objectName: string, removeOptions?: VersionIdentificator): Promise + + getObjectTagging(bucketName: string, objectName: string, callback: ResultCallback): void + getObjectTagging( + bucketName: string, + objectName: string, + getOptions: VersionIdentificator, + callback: ResultCallback, + ): void + getObjectTagging(bucketName: string, objectName: string, getOptions?: VersionIdentificator): Promise + + getObjectLegalHold(bucketName: string, objectName: string, callback: ResultCallback): void + getObjectLegalHold( + bucketName: string, + objectName: string, + getOptions: VersionIdentificator, + callback: ResultCallback, + ): void + getObjectLegalHold( + bucketName: string, + objectName: string, + getOptions?: VersionIdentificator, + ): Promise + + setObjectLegalHold(bucketName: string, objectName: string, callback: NoResultCallback): void + setObjectLegalHold( + bucketName: string, + objectName: string, + setOptions: LegalHoldOptions, + callback: 
NoResultCallback, + ): void + setObjectLegalHold(bucketName: string, objectName: string, setOptions?: LegalHoldOptions): Promise + + composeObject( + destObjConfig: CopyDestinationOptions, + sourceObjList: CopySourceOptions[], + callback: ResultCallback, + ): void + composeObject(destObjConfig: CopyDestinationOptions, sourceObjList: CopySourceOptions[]): Promise + + selectObjectContent( + bucketName: string, + objectName: string, + selectOpts: SelectOptions, + callback: NoResultCallback, + ): void + selectObjectContent(bucketName: string, objectName: string, selectOpts: SelectOptions): Promise + + // Presigned operations + presignedUrl(httpMethod: string, bucketName: string, objectName: string, callback: ResultCallback): void + presignedUrl( + httpMethod: string, + bucketName: string, + objectName: string, + expiry: number, + callback: ResultCallback, + ): void + presignedUrl( + httpMethod: string, + bucketName: string, + objectName: string, + expiry: number, + reqParams: { [key: string]: any }, + callback: ResultCallback, + ): void + presignedUrl( + httpMethod: string, + bucketName: string, + objectName: string, + expiry: number, + reqParams: { [key: string]: any }, + requestDate: Date, + callback: ResultCallback, + ): void + presignedUrl( + httpMethod: string, + bucketName: string, + objectName: string, + expiry?: number, + reqParams?: { [key: string]: any }, + requestDate?: Date, + ): Promise + + presignedGetObject(bucketName: string, objectName: string, callback: ResultCallback): void + presignedGetObject(bucketName: string, objectName: string, expiry: number, callback: ResultCallback): void + presignedGetObject( + bucketName: string, + objectName: string, + expiry: number, + respHeaders: { [key: string]: any }, + callback: ResultCallback, + ): void + presignedGetObject( + bucketName: string, + objectName: string, + expiry: number, + respHeaders: { [key: string]: any }, + requestDate: Date, + callback: ResultCallback, + ): void + presignedGetObject( + 
bucketName: string, + objectName: string, + expiry?: number, + respHeaders?: { [key: string]: any }, + requestDate?: Date, + ): Promise + + presignedPutObject(bucketName: string, objectName: string, callback: ResultCallback): void + presignedPutObject(bucketName: string, objectName: string, expiry: number, callback: ResultCallback): void + presignedPutObject(bucketName: string, objectName: string, expiry?: number): Promise + + presignedPostPolicy(policy: PostPolicy, callback: ResultCallback): void + presignedPostPolicy(policy: PostPolicy): Promise + + // Bucket Policy & Notification operations + getBucketNotification(bucketName: string, callback: ResultCallback): void + getBucketNotification(bucketName: string): Promise + + setBucketNotification( + bucketName: string, + bucketNotificationConfig: NotificationConfig, + callback: NoResultCallback, + ): void + setBucketNotification(bucketName: string, bucketNotificationConfig: NotificationConfig): Promise + + removeAllBucketNotification(bucketName: string, callback: NoResultCallback): void + removeAllBucketNotification(bucketName: string): Promise + + getBucketPolicy(bucketName: string, callback: ResultCallback): void + getBucketPolicy(bucketName: string): Promise + + setBucketPolicy(bucketName: string, bucketPolicy: string, callback: NoResultCallback): void + setBucketPolicy(bucketName: string, bucketPolicy: string): Promise + + listenBucketNotification( + bucketName: string, + prefix: string, + suffix: string, + events: NotificationEvent[], + ): NotificationPoller + + // Custom Settings + setS3TransferAccelerate(endpoint: string): void + + // Other + newPostPolicy(): PostPolicy + + setRequestOptions(options: RequestOptions): void + + // Minio extensions that aren't necessary present for Amazon S3 compatible storage servers + extensions: { + listObjectsV2WithMetadata( + bucketName: string, + prefix?: string, + recursive?: boolean, + startAfter?: string, + ): BucketStream + } +} + +export namespace Policy { + const 
NONE: 'none' + const READONLY: 'readonly' + const WRITEONLY: 'writeonly' + const READWRITE: 'readwrite' +} + +export class CopyConditions { + setModified(date: Date): void + + setUnmodified(date: Date): void + + setMatchETag(etag: string): void + + setMatchETagExcept(etag: string): void +} + +export class PostPolicy { + setExpires(date: Date): void + + setKey(objectName: string): void + + setKeyStartsWith(prefix: string): void + + setBucket(bucketName: string): void + + setContentType(type: string): void + + setContentTypeStartsWith(prefix: string): void + + setContentLengthRange(min: number, max: number): void + + setContentDisposition(disposition: string): void + + setUserMetaData(metadata: Record): void +} + +export class NotificationPoller extends EventEmitter { + stop(): void + + start(): void + + // must to be public? + checkForChanges(): void +} + +export class NotificationConfig { + add(target: TopicConfig | QueueConfig | CloudFunctionConfig): void +} + +export class TopicConfig extends TargetConfig { + constructor(arn: string) +} + +export class QueueConfig extends TargetConfig { + constructor(arn: string) +} + +export class CloudFunctionConfig extends TargetConfig { + constructor(arn: string) +} + +export class CopySourceOptions { + constructor(options: { + Bucket: string + Object: string + VersionID?: string + MatchETag?: string + NoMatchETag?: string + MatchModifiedSince?: string + MatchUnmodifiedSince?: string + MatchRange?: boolean + Start?: number + End?: number + Encryption?: { + type: string + SSEAlgorithm?: string + KMSMasterKeyID?: string + } + }) + + getHeaders(): Record + + validate(): boolean +} + +export class CopyDestinationOptions { + constructor(options: { + Bucket: string + Object: string + Encryption?: { + type: string + SSEAlgorithm?: string + KMSMasterKeyID?: string + } + UserMetadata?: Record + UserTags?: Record | string + LegalHold?: LegalHoldStatus + RetainUntilDate?: string + Mode?: Mode + }) + + getHeaders(): Record + + 
validate(): boolean +} + +export function buildARN( + partition: string, + service: string, + region: string, + accountId: string, + resource: string, +): string + +export const ObjectCreatedAll: NotificationEvent // s3:ObjectCreated:*' +export const ObjectCreatedPut: NotificationEvent // s3:ObjectCreated:Put +export const ObjectCreatedPost: NotificationEvent // s3:ObjectCreated:Post +export const ObjectCreatedCopy: NotificationEvent // s3:ObjectCreated:Copy +export const ObjectCreatedCompleteMultipartUpload: NotificationEvent // s3:ObjectCreated:CompleteMultipartUpload +export const ObjectRemovedAll: NotificationEvent // s3:ObjectRemoved:* +export const ObjectRemovedDelete: NotificationEvent // s3:ObjectRemoved:Delete +export const ObjectRemovedDeleteMarkerCreated: NotificationEvent // s3:ObjectRemoved:DeleteMarkerCreated +export const ObjectReducedRedundancyLostObject: NotificationEvent // s3:ReducedRedundancyLostObject From 8b529763260c3865e9c82f893119e1cea8c22606 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 14:28:56 +0800 Subject: [PATCH 02/78] diff --- src/main/AssumeRoleProvider.js | 218 -- src/main/CredentialProvider.js | 49 - src/main/Credentials.js | 36 - src/main/errors.js | 135 - src/main/extensions.js | 172 - src/main/helpers.js | 822 ---- src/main/minio.js | 3985 ------------------- src/main/notification.js | 200 - src/main/object-uploader.js | 283 -- src/main/s3-endpoints.js | 50 - src/main/signing.js | 298 -- src/main/transformers.js | 262 -- src/main/xml-parsers.js | 708 ---- src/test/functional/functional-tests.js | 4731 ----------------------- src/test/unit/test.js | 2108 ---------- 15 files changed, 14057 deletions(-) delete mode 100644 src/main/AssumeRoleProvider.js delete mode 100644 src/main/CredentialProvider.js delete mode 100644 src/main/Credentials.js delete mode 100644 src/main/errors.js delete mode 100644 src/main/extensions.js delete mode 100644 src/main/helpers.js delete mode 100644 src/main/minio.js delete mode 100644 
src/main/notification.js delete mode 100644 src/main/object-uploader.js delete mode 100644 src/main/s3-endpoints.js delete mode 100644 src/main/signing.js delete mode 100644 src/main/transformers.js delete mode 100644 src/main/xml-parsers.js delete mode 100644 src/test/functional/functional-tests.js delete mode 100644 src/test/unit/test.js diff --git a/src/main/AssumeRoleProvider.js b/src/main/AssumeRoleProvider.js deleted file mode 100644 index 91f61438..00000000 --- a/src/main/AssumeRoleProvider.js +++ /dev/null @@ -1,218 +0,0 @@ -import Http from 'http' -import Https from 'https' - -import CredentialProvider from './CredentialProvider' -import Credentials from './Credentials' -import { makeDateLong, parseXml, toSha256 } from './helpers' -import { signV4ByServiceName } from './signing' -const { URLSearchParams, URL } = require('url') - -class AssumeRoleProvider extends CredentialProvider { - constructor({ - stsEndpoint, - accessKey, - secretKey, - durationSeconds = 900, - sessionToken, - policy, - region = '', - roleArn, - roleSessionName, - externalId, - token, - webIdentityToken, - action = 'AssumeRole', - transportAgent = undefined, - }) { - super({}) - - this.stsEndpoint = stsEndpoint - this.accessKey = accessKey - this.secretKey = secretKey - this.durationSeconds = durationSeconds - this.policy = policy - this.region = region - this.roleArn = roleArn - this.roleSessionName = roleSessionName - this.externalId = externalId - this.token = token - this.webIdentityToken = webIdentityToken - this.action = action - this.sessionToken = sessionToken - // By default, nodejs uses a global agent if the 'agent' property - // is set to undefined. Otherwise, it's okay to assume the users - // know what they're doing if they specify a custom transport agent. 
- this.transportAgent = transportAgent - - /** - * Internal Tracking variables - */ - this.credentials = null - this.expirySeconds = null - this.accessExpiresAt = null - } - - getRequestConfig() { - const url = new URL(this.stsEndpoint) - const hostValue = url.hostname - const portValue = url.port - const isHttp = url.protocol.includes('http:') - const qryParams = new URLSearchParams() - qryParams.set('Action', this.action) - qryParams.set('Version', '2011-06-15') - - const defaultExpiry = 900 - let expirySeconds = parseInt(this.durationSeconds) - if (expirySeconds < defaultExpiry) { - expirySeconds = defaultExpiry - } - this.expirySeconds = expirySeconds // for calculating refresh of credentials. - - qryParams.set('DurationSeconds', this.expirySeconds) - - if (this.policy) { - qryParams.set('Policy', this.policy) - } - if (this.roleArn) { - qryParams.set('RoleArn', this.roleArn) - } - - if (this.roleSessionName != null) { - qryParams.set('RoleSessionName', this.roleSessionName) - } - if (this.token != null) { - qryParams.set('Token', this.token) - } - - if (this.webIdentityToken) { - qryParams.set('WebIdentityToken', this.webIdentityToken) - } - - if (this.externalId) { - qryParams.set('ExternalId', this.externalId) - } - - const urlParams = qryParams.toString() - const contentSha256 = toSha256(urlParams) - - const date = new Date() - - /** - * Nodejs's Request Configuration. 
- */ - const requestOptions = { - hostname: hostValue, - port: portValue, - path: '/', - protocol: url.protocol, - method: 'POST', - headers: { - 'Content-Type': 'application/x-www-form-urlencoded', - 'content-length': urlParams.length, - host: hostValue, - 'x-amz-date': makeDateLong(date), - 'x-amz-content-sha256': contentSha256, - }, - agent: this.transportAgent, - } - - const authorization = signV4ByServiceName(requestOptions, this.accessKey, this.secretKey, this.region, date, 'sts') - requestOptions.headers.authorization = authorization - - return { - requestOptions, - requestData: urlParams, - isHttp: isHttp, - } - } - - async performRequest() { - const reqObj = this.getRequestConfig() - const requestOptions = reqObj.requestOptions - const requestData = reqObj.requestData - - const isHttp = reqObj.isHttp - const Transport = isHttp ? Http : Https - - const promise = new Promise((resolve, reject) => { - const requestObj = Transport.request(requestOptions, (resp) => { - let resChunks = [] - resp.on('data', (rChunk) => { - resChunks.push(rChunk) - }) - resp.on('end', () => { - let body = Buffer.concat(resChunks).toString() - const xmlobj = parseXml(body) - resolve(xmlobj) - }) - resp.on('error', (err) => { - reject(err) - }) - }) - requestObj.on('error', (e) => { - reject(e) - }) - requestObj.write(requestData) - requestObj.end() - }) - return promise - } - - parseCredentials(respObj = {}) { - if (respObj.ErrorResponse) { - throw new Error('Unable to obtain credentials:', respObj) - } - const { - AssumeRoleResponse: { - AssumeRoleResult: { - Credentials: { - AccessKeyId: accessKey, - SecretAccessKey: secretKey, - SessionToken: sessionToken, - Expiration: expiresAt, - } = {}, - } = {}, - } = {}, - } = respObj - - this.accessExpiresAt = expiresAt - - const newCreds = new Credentials({ - accessKey, - secretKey, - sessionToken, - }) - - this.setCredentials(newCreds) - return this.credentials - } - - async refreshCredentials() { - try { - const assumeRoleCredentials = 
await this.performRequest() - this.credentials = this.parseCredentials(assumeRoleCredentials) - } catch (err) { - this.credentials = null - } - return this.credentials - } - - async getCredentials() { - let credConfig - if (!this.credentials || (this.credentials && this.isAboutToExpire())) { - credConfig = await this.refreshCredentials() - } else { - credConfig = this.credentials - } - return credConfig - } - - isAboutToExpire() { - const expiresAt = new Date(this.accessExpiresAt) - const provisionalExpiry = new Date(Date.now() + 1000 * 10) // check before 10 seconds. - const isAboutToExpire = provisionalExpiry > expiresAt - return isAboutToExpire - } -} - -export default AssumeRoleProvider diff --git a/src/main/CredentialProvider.js b/src/main/CredentialProvider.js deleted file mode 100644 index ae81a89f..00000000 --- a/src/main/CredentialProvider.js +++ /dev/null @@ -1,49 +0,0 @@ -import Credentials from './Credentials' - -class CredentialProvider { - constructor({ accessKey, secretKey, sessionToken }) { - this.credentials = new Credentials({ - accessKey, - secretKey, - sessionToken, - }) - } - - getCredentials() { - return this.credentials.get() - } - - setCredentials(credentials) { - if (credentials instanceof Credentials) { - this.credentials = credentials - } else { - throw new Error('Unable to set Credentials . 
it should be an instance of Credentials class') - } - } - - setAccessKey(accessKey) { - this.credentials.setAccessKey(accessKey) - } - - getAccessKey() { - return this.credentials.getAccessKey() - } - - setSecretKey(secretKey) { - this.credentials.setSecretKey(secretKey) - } - - getSecretKey() { - return this.credentials.getSecretKey() - } - - setSessionToken(sessionToken) { - this.credentials.setSessionToken(sessionToken) - } - - getSessionToken() { - return this.credentials.getSessionToken() - } -} - -export default CredentialProvider diff --git a/src/main/Credentials.js b/src/main/Credentials.js deleted file mode 100644 index 07992385..00000000 --- a/src/main/Credentials.js +++ /dev/null @@ -1,36 +0,0 @@ -class Credentials { - constructor({ accessKey, secretKey, sessionToken }) { - this.accessKey = accessKey - this.secretKey = secretKey - this.sessionToken = sessionToken - } - - setAccessKey(accessKey) { - this.accessKey = accessKey - } - getAccessKey() { - return this.accessKey - } - setSecretKey(secretKey) { - this.secretKey = secretKey - } - getSecretKey() { - return this.secretKey - } - setSessionToken(sessionToken) { - this.sessionToken = sessionToken - } - getSessionToken() { - return this.sessionToken - } - - get() { - return { - accessKey: this.accessKey, - secretKey: this.secretKey, - sessionToken: this.sessionToken, - } - } -} - -export default Credentials diff --git a/src/main/errors.js b/src/main/errors.js deleted file mode 100644 index 144a955b..00000000 --- a/src/main/errors.js +++ /dev/null @@ -1,135 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import ExtendableError from 'es6-error' - -// AnonymousRequestError is generated for anonymous keys on specific -// APIs. NOTE: PresignedURL generation always requires access keys. -export class AnonymousRequestError extends ExtendableError { - constructor(message) { - super(message) - } -} - -// InvalidArgumentError is generated for all invalid arguments. -export class InvalidArgumentError extends ExtendableError { - constructor(message) { - super(message) - } -} - -// InvalidPortError is generated when a non integer value is provided -// for ports. -export class InvalidPortError extends ExtendableError { - constructor(message) { - super(message) - } -} - -// InvalidEndpointError is generated when an invalid end point value is -// provided which does not follow domain standards. -export class InvalidEndpointError extends ExtendableError { - constructor(message) { - super(message) - } -} - -// InvalidBucketNameError is generated when an invalid bucket name is -// provided which does not follow AWS S3 specifications. -// http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html -export class InvalidBucketNameError extends ExtendableError { - constructor(message) { - super(message) - } -} - -// InvalidObjectNameError is generated when an invalid object name is -// provided which does not follow AWS S3 specifications. 
-// http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html -export class InvalidObjectNameError extends ExtendableError { - constructor(message) { - super(message) - } -} - -// AccessKeyRequiredError generated by signature methods when access -// key is not found. -export class AccessKeyRequiredError extends ExtendableError { - constructor(message) { - super(message) - } -} - -// SecretKeyRequiredError generated by signature methods when secret -// key is not found. -export class SecretKeyRequiredError extends ExtendableError { - constructor(message) { - super(message) - } -} - -// ExpiresParamError generated when expires parameter value is not -// well within stipulated limits. -export class ExpiresParamError extends ExtendableError { - constructor(message) { - super(message) - } -} - -// InvalidDateError generated when invalid date is found. -export class InvalidDateError extends ExtendableError { - constructor(message) { - super(message) - } -} - -// InvalidPrefixError generated when object prefix provided is invalid -// or does not conform to AWS S3 object key restrictions. -export class InvalidPrefixError extends ExtendableError { - constructor(message) { - super(message) - } -} - -// InvalidBucketPolicyError generated when the given bucket policy is invalid. -export class InvalidBucketPolicyError extends ExtendableError { - constructor(message) { - super(message) - } -} - -// IncorrectSizeError generated when total data read mismatches with -// the input size. -export class IncorrectSizeError extends ExtendableError { - constructor(message) { - super(message) - } -} - -// InvalidXMLError generated when an unknown XML is found. -export class InvalidXMLError extends ExtendableError { - constructor(message) { - super(message) - } -} - -// S3Error is generated for errors returned from S3 server. 
-// see getErrorTransformer for details -export class S3Error extends ExtendableError { - constructor(message) { - super(message) - } -} diff --git a/src/main/extensions.js b/src/main/extensions.js deleted file mode 100644 index 8ba596fa..00000000 --- a/src/main/extensions.js +++ /dev/null @@ -1,172 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2020 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import Stream from 'stream' - -import * as errors from './errors.js' -import { isBoolean, isNumber, isString, isValidBucketName, isValidPrefix, pipesetup, uriEscape } from './helpers.js' -import * as transformers from './transformers' - -export default class extensions { - constructor(client) { - this.client = client - } - - // List the objects in the bucket using S3 ListObjects V2 With Metadata - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `prefix` _string_: the prefix of the objects that should be listed (optional, default `''`) - // * `recursive` _bool_: `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. (optional, default `false`) - // * `startAfter` _string_: Specifies the key to start after when listing objects in a bucket. 
(optional, default `''`) - // - // __Return Value__ - // * `stream` _Stream_: stream emitting the objects in the bucket, the object is of the format: - // * `obj.name` _string_: name of the object - // * `obj.prefix` _string_: name of the object prefix - // * `obj.size` _number_: size of the object - // * `obj.etag` _string_: etag of the object - // * `obj.lastModified` _Date_: modified time stamp - // * `obj.metadata` _object_: metadata of the object - - listObjectsV2WithMetadata(bucketName, prefix, recursive, startAfter) { - if (prefix === undefined) { - prefix = '' - } - if (recursive === undefined) { - recursive = false - } - if (startAfter === undefined) { - startAfter = '' - } - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidPrefix(prefix)) { - throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) - } - if (!isString(prefix)) { - throw new TypeError('prefix should be of type "string"') - } - if (!isBoolean(recursive)) { - throw new TypeError('recursive should be of type "boolean"') - } - if (!isString(startAfter)) { - throw new TypeError('startAfter should be of type "string"') - } - // if recursive is false set delimiter to '/' - var delimiter = recursive ? 
'' : '/' - var continuationToken = '' - var objects = [] - var ended = false - var readStream = Stream.Readable({ objectMode: true }) - readStream._read = () => { - // push one object per _read() - if (objects.length) { - readStream.push(objects.shift()) - return - } - if (ended) { - return readStream.push(null) - } - // if there are no objects to push do query for the next batch of objects - this.listObjectsV2WithMetadataQuery(bucketName, prefix, continuationToken, delimiter, 1000, startAfter) - .on('error', (e) => readStream.emit('error', e)) - .on('data', (result) => { - if (result.isTruncated) { - continuationToken = result.nextContinuationToken - } else { - ended = true - } - objects = result.objects - readStream._read() - }) - } - return readStream - } - - // listObjectsV2WithMetadataQuery - (List Objects V2 with metadata) - List some or all (up to 1000) of the objects in a bucket. - // - // You can use the request parameters as selection criteria to return a subset of the objects in a bucket. - // request parameters :- - // * `bucketName` _string_: name of the bucket - // * `prefix` _string_: Limits the response to keys that begin with the specified prefix. - // * `continuation-token` _string_: Used to continue iterating over a set of objects. - // * `delimiter` _string_: A delimiter is a character you use to group keys. - // * `max-keys` _number_: Sets the maximum number of keys returned in the response body. - // * `start-after` _string_: Specifies the key to start after when listing objects in a bucket. 
- - listObjectsV2WithMetadataQuery(bucketName, prefix, continuationToken, delimiter, maxKeys, startAfter) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isString(prefix)) { - throw new TypeError('prefix should be of type "string"') - } - if (!isString(continuationToken)) { - throw new TypeError('continuationToken should be of type "string"') - } - if (!isString(delimiter)) { - throw new TypeError('delimiter should be of type "string"') - } - if (!isNumber(maxKeys)) { - throw new TypeError('maxKeys should be of type "number"') - } - if (!isString(startAfter)) { - throw new TypeError('startAfter should be of type "string"') - } - var queries = [] - - // Call for listing objects v2 API - queries.push(`list-type=2`) - queries.push(`encoding-type=url`) - // escape every value in query string, except maxKeys - queries.push(`prefix=${uriEscape(prefix)}`) - queries.push(`delimiter=${uriEscape(delimiter)}`) - queries.push(`metadata=true`) - - if (continuationToken) { - continuationToken = uriEscape(continuationToken) - queries.push(`continuation-token=${continuationToken}`) - } - // Set start-after - if (startAfter) { - startAfter = uriEscape(startAfter) - queries.push(`start-after=${startAfter}`) - } - // no need to escape maxKeys - if (maxKeys) { - if (maxKeys >= 1000) { - maxKeys = 1000 - } - queries.push(`max-keys=${maxKeys}`) - } - queries.sort() - var query = '' - if (queries.length > 0) { - query = `${queries.join('&')}` - } - var method = 'GET' - var transformer = transformers.getListObjectsV2WithMetadataTransformer() - this.client.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return transformer.emit('error', e) - } - pipesetup(response, transformer) - }) - return transformer - } -} diff --git a/src/main/helpers.js b/src/main/helpers.js deleted file mode 100644 index 1da1197f..00000000 --- a/src/main/helpers.js +++ /dev/null @@ 
-1,822 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import mime from 'mime-types' -import stream from 'stream' -var Crypto = require('crypto-browserify') -const { XMLParser } = require('fast-xml-parser') -const fxp = new XMLParser() - -const ipaddr = require('ipaddr.js') -import { isBrowser } from 'browser-or-node' - -const fs = require('fs') -const path = require('path') -import _ from 'lodash' -import querystring from 'query-string' - -import * as errors from './errors.js' - -// Returns a wrapper function that will promisify a given callback function. -// It will preserve 'this'. -export function promisify(fn) { - return function () { - // If the last argument is a function, assume its the callback. - let callback = arguments[arguments.length - 1] - - // If the callback is given, don't promisify, just pass straight in. - if (typeof callback === 'function') { - return fn.apply(this, arguments) - } - - // Otherwise, create a new set of arguments, and wrap - // it in a promise. - let args = [...arguments] - - return new Promise((resolve, reject) => { - // Add the callback function. - args.push((err, value) => { - if (err) { - return reject(err) - } - - resolve(value) - }) - - // Call the function with our special adaptor callback added. 
- fn.apply(this, args) - }) - } -} - -// All characters in string which are NOT unreserved should be percent encoded. -// Unreserved characers are : ALPHA / DIGIT / "-" / "." / "_" / "~" -// Reference https://tools.ietf.org/html/rfc3986#section-2.2 -export function uriEscape(string) { - return string.split('').reduce((acc, elem) => { - let buf = Buffer.from(elem) - if (buf.length === 1) { - // length 1 indicates that elem is not a unicode character. - // Check if it is an unreserved characer. - if ( - ('A' <= elem && elem <= 'Z') || - ('a' <= elem && elem <= 'z') || - ('0' <= elem && elem <= '9') || - elem === '_' || - elem === '.' || - elem === '~' || - elem === '-' - ) { - // Unreserved characer should not be encoded. - acc = acc + elem - return acc - } - } - // elem needs encoding - i.e elem should be encoded if it's not unreserved - // character or if it's a unicode character. - for (var i = 0; i < buf.length; i++) { - acc = acc + '%' + buf[i].toString(16).toUpperCase() - } - return acc - }, '') -} - -export function uriResourceEscape(string) { - return uriEscape(string).replace(/%2F/g, '/') -} - -export function getScope(region, date, serviceName = 's3') { - return `${makeDateShort(date)}/${region}/${serviceName}/aws4_request` -} - -// isAmazonEndpoint - true if endpoint is 's3.amazonaws.com' or 's3.cn-north-1.amazonaws.com.cn' -export function isAmazonEndpoint(endpoint) { - return endpoint === 's3.amazonaws.com' || endpoint === 's3.cn-north-1.amazonaws.com.cn' -} - -// isVirtualHostStyle - verify if bucket name is support with virtual -// hosts. bucketNames with periods should be always treated as path -// style if the protocol is 'https:', this is due to SSL wildcard -// limitation. For all other buckets and Amazon S3 endpoint we will -// default to virtual host style. 
-export function isVirtualHostStyle(endpoint, protocol, bucket, pathStyle) { - if (protocol === 'https:' && bucket.indexOf('.') > -1) { - return false - } - return isAmazonEndpoint(endpoint) || !pathStyle -} - -export function isValidIP(ip) { - return ipaddr.isValid(ip) -} - -// isValidEndpoint - true if endpoint is valid domain. -export function isValidEndpoint(endpoint) { - return isValidDomain(endpoint) || isValidIP(endpoint) -} - -// isValidDomain - true if input host is a valid domain. -export function isValidDomain(host) { - if (!isString(host)) { - return false - } - // See RFC 1035, RFC 3696. - if (host.length === 0 || host.length > 255) { - return false - } - // Host cannot start or end with a '-' - if (host[0] === '-' || host.slice(-1) === '-') { - return false - } - // Host cannot start or end with a '_' - if (host[0] === '_' || host.slice(-1) === '_') { - return false - } - // Host cannot start with a '.' - if (host[0] === '.') { - return false - } - var alphaNumerics = '`~!@#$%^&*()+={}[]|\\"\';:> -1) { - return false - } - } - // No need to regexp match, since the list is non-exhaustive. - // We let it be valid and fail later. - return true -} - -// Probes contentType using file extensions. -// For example: probeContentType('file.png') returns 'image/png'. -export function probeContentType(path) { - let contentType = mime.lookup(path) - if (!contentType) { - contentType = 'application/octet-stream' - } - return contentType -} - -// isValidPort - is input port valid. -export function isValidPort(port) { - // verify if port is a number. - if (!isNumber(port)) { - return false - } - // port cannot be negative. - if (port < 0) { - return false - } - // port '0' is valid and special case return true. - if (port === 0) { - return true - } - var min_port = 1 - var max_port = 65535 - // Verify if port is in range. 
- return port >= min_port && port <= max_port -} - -export function isValidBucketName(bucket) { - if (!isString(bucket)) { - return false - } - - // bucket length should be less than and no more than 63 - // characters long. - if (bucket.length < 3 || bucket.length > 63) { - return false - } - // bucket with successive periods is invalid. - if (bucket.indexOf('..') > -1) { - return false - } - // bucket cannot have ip address style. - if (bucket.match(/[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+/)) { - return false - } - // bucket should begin with alphabet/number and end with alphabet/number, - // with alphabet/number/.- in the middle. - if (bucket.match(/^[a-z0-9][a-z0-9.-]+[a-z0-9]$/)) { - return true - } - return false -} - -// check if objectName is a valid object name -export function isValidObjectName(objectName) { - if (!isValidPrefix(objectName)) { - return false - } - if (objectName.length === 0) { - return false - } - return true -} - -// check if prefix is valid -export function isValidPrefix(prefix) { - if (!isString(prefix)) { - return false - } - if (prefix.length > 1024) { - return false - } - return true -} - -// check if typeof arg number -export function isNumber(arg) { - return typeof arg === 'number' -} - -// check if typeof arg function -export function isFunction(arg) { - return typeof arg === 'function' -} - -// check if typeof arg string -export function isString(arg) { - return typeof arg === 'string' -} - -// check if typeof arg object -export function isObject(arg) { - return typeof arg === 'object' && arg !== null -} - -// check if object is readable stream -export function isReadableStream(arg) { - return isObject(arg) && isFunction(arg._read) -} - -// check if arg is boolean -export function isBoolean(arg) { - return typeof arg === 'boolean' -} - -// check if arg is array -export function isArray(arg) { - return Array.isArray(arg) -} - -// check if arg is a valid date -export function isValidDate(arg) { - return arg instanceof Date && !isNaN(arg) 
-} - -// Create a Date string with format: -// 'YYYYMMDDTHHmmss' + Z -export function makeDateLong(date) { - date = date || new Date() - - // Gives format like: '2017-08-07T16:28:59.889Z' - date = date.toISOString() - - return date.slice(0, 4) + date.slice(5, 7) + date.slice(8, 13) + date.slice(14, 16) + date.slice(17, 19) + 'Z' -} - -// Create a Date string with format: -// 'YYYYMMDD' -export function makeDateShort(date) { - date = date || new Date() - - // Gives format like: '2017-08-07T16:28:59.889Z' - date = date.toISOString() - - return date.slice(0, 4) + date.slice(5, 7) + date.slice(8, 10) -} - -// pipesetup sets up pipe() from left to right os streams array -// pipesetup will also make sure that error emitted at any of the upstream Stream -// will be emitted at the last stream. This makes error handling simple -export function pipesetup(...streams) { - return streams.reduce((src, dst) => { - src.on('error', (err) => dst.emit('error', err)) - return src.pipe(dst) - }) -} - -// return a Readable stream that emits data -export function readableStream(data) { - var s = new stream.Readable() - s._read = () => {} - s.push(data) - s.push(null) - return s -} - -// Process metadata to insert appropriate value to `content-type` attribute -export function insertContentType(metaData, filePath) { - // check if content-type attribute present in metaData - for (var key in metaData) { - if (key.toLowerCase() === 'content-type') { - return metaData - } - } - // if `content-type` attribute is not present in metadata, - // then infer it from the extension in filePath - var newMetadata = Object.assign({}, metaData) - newMetadata['content-type'] = probeContentType(filePath) - return newMetadata -} - -// Function prepends metadata with the appropriate prefix if it is not already on -export function prependXAMZMeta(metaData) { - var newMetadata = Object.assign({}, metaData) - for (var key in metaData) { - if (!isAmzHeader(key) && !isSupportedHeader(key) && 
!isStorageclassHeader(key)) { - newMetadata['X-Amz-Meta-' + key] = newMetadata[key] - delete newMetadata[key] - } - } - return newMetadata -} - -// Checks if it is a valid header according to the AmazonS3 API -export function isAmzHeader(key) { - var temp = key.toLowerCase() - return ( - temp.startsWith('x-amz-meta-') || - temp === 'x-amz-acl' || - temp.startsWith('x-amz-server-side-encryption-') || - temp === 'x-amz-server-side-encryption' - ) -} -// Checks if it is a supported Header -export function isSupportedHeader(key) { - var supported_headers = [ - 'content-type', - 'cache-control', - 'content-encoding', - 'content-disposition', - 'content-language', - 'x-amz-website-redirect-location', - ] - return supported_headers.indexOf(key.toLowerCase()) > -1 -} -// Checks if it is a storage header -export function isStorageclassHeader(key) { - return key.toLowerCase() === 'x-amz-storage-class' -} - -export function extractMetadata(metaData) { - var newMetadata = {} - for (var key in metaData) { - if (isSupportedHeader(key) || isStorageclassHeader(key) || isAmzHeader(key)) { - if (key.toLowerCase().startsWith('x-amz-meta-')) { - newMetadata[key.slice(11, key.length)] = metaData[key] - } else { - newMetadata[key] = metaData[key] - } - } - } - return newMetadata -} - -export function getVersionId(headers = {}) { - const versionIdValue = headers['x-amz-version-id'] - return versionIdValue || null -} - -export function getSourceVersionId(headers = {}) { - const sourceVersionId = headers['x-amz-copy-source-version-id'] - return sourceVersionId || null -} - -export function sanitizeETag(etag = '') { - var replaceChars = { '"': '', '"': '', '"': '', '"': '', '"': '' } - return etag.replace(/^("|"|")|("|"|")$/g, (m) => replaceChars[m]) -} - -export const RETENTION_MODES = { - GOVERNANCE: 'GOVERNANCE', - COMPLIANCE: 'COMPLIANCE', -} - -export const RETENTION_VALIDITY_UNITS = { - DAYS: 'Days', - YEARS: 'Years', -} - -export const LEGAL_HOLD_STATUS = { - ENABLED: 'ON', - 
DISABLED: 'OFF', -} - -const objectToBuffer = (payload) => { - const payloadBuf = Buffer.from(Buffer.from(payload)) - return payloadBuf -} - -export const toMd5 = (payload) => { - let payLoadBuf = objectToBuffer(payload) - // use string from browser and buffer from nodejs - // browser support is tested only against minio server - payLoadBuf = isBrowser ? payLoadBuf.toString() : payLoadBuf - return Crypto.createHash('md5').update(payLoadBuf).digest().toString('base64') -} - -export const toSha256 = (payload) => { - return Crypto.createHash('sha256').update(payload).digest('hex') -} - -// toArray returns a single element array with param being the element, -// if param is just a string, and returns 'param' back if it is an array -// So, it makes sure param is always an array -export const toArray = (param) => { - if (!Array.isArray(param)) { - return [param] - } - return param -} - -export const sanitizeObjectKey = (objectName) => { - // + symbol characters are not decoded as spaces in JS. so replace them first and decode to get the correct result. - let asStrName = (objectName ? objectName.toString() : '').replace(/\+/g, ' ') - const sanitizedName = decodeURIComponent(asStrName) - return sanitizedName -} - -export const PART_CONSTRAINTS = { - // absMinPartSize - absolute minimum part size (5 MiB) - ABS_MIN_PART_SIZE: 1024 * 1024 * 5, - // MIN_PART_SIZE - minimum part size 16MiB per object after which - MIN_PART_SIZE: 1024 * 1024 * 16, - // MAX_PARTS_COUNT - maximum number of parts for a single multipart session. - MAX_PARTS_COUNT: 10000, - // MAX_PART_SIZE - maximum part size 5GiB for a single multipart upload - // operation. - MAX_PART_SIZE: 1024 * 1024 * 1024 * 5, - // MAX_SINGLE_PUT_OBJECT_SIZE - maximum size 5GiB of object per PUT - // operation. - MAX_SINGLE_PUT_OBJECT_SIZE: 1024 * 1024 * 1024 * 5, - // MAX_MULTIPART_PUT_OBJECT_SIZE - maximum size 5TiB of object for - // Multipart operation. 
- MAX_MULTIPART_PUT_OBJECT_SIZE: 1024 * 1024 * 1024 * 1024 * 5, -} - -export const ENCRYPTION_TYPES = { - // SSEC represents server-side-encryption with customer provided keys - SSEC: 'SSE-C', - // KMS represents server-side-encryption with managed keys - KMS: 'KMS', -} -const GENERIC_SSE_HEADER = 'X-Amz-Server-Side-Encryption' - -const ENCRYPTION_HEADERS = { - // sseGenericHeader is the AWS SSE header used for SSE-S3 and SSE-KMS. - sseGenericHeader: GENERIC_SSE_HEADER, - // sseKmsKeyID is the AWS SSE-KMS key id. - sseKmsKeyID: GENERIC_SSE_HEADER + '-Aws-Kms-Key-Id', -} - -/** - * Return Encryption headers - * @param encConfig - * @returns an object with key value pairs that can be used in headers. - */ -function getEncryptionHeaders(encConfig) { - const encType = encConfig.type - const encHeaders = {} - if (!_.isEmpty(encType)) { - if (encType === ENCRYPTION_TYPES.SSEC) { - return { - [encHeaders[ENCRYPTION_HEADERS.sseGenericHeader]]: 'AES256', - } - } else if (encType === ENCRYPTION_TYPES.KMS) { - return { - [ENCRYPTION_HEADERS.sseGenericHeader]: encConfig.SSEAlgorithm, - [ENCRYPTION_HEADERS.sseKmsKeyID]: encConfig.KMSMasterKeyID, - } - } - } - - return encHeaders -} - -export class CopySourceOptions { - /** - * - * @param Bucket __string__ Bucket Name - * @param Object __string__ Object Name - * @param VersionID __string__ Valid versionId - * @param MatchETag __string__ Etag to match - * @param NoMatchETag __string__ Etag to exclude - * @param MatchModifiedSince __string__ Modified Date of the object/part. 
UTC Date in string format - * @param MatchUnmodifiedSince __string__ Modified Date of the object/part to exclude UTC Date in string format - * @param MatchRange __boolean__ true or false Object range to match - * @param Start - * @param End - * @param Encryption - */ - constructor({ - Bucket = '', - Object = '', - VersionID = '', - MatchETag = '', - NoMatchETag = '', - MatchModifiedSince = null, - MatchUnmodifiedSince = null, - MatchRange = false, - Start = 0, - End = 0, - Encryption = {}, - } = {}) { - this.Bucket = Bucket - this.Object = Object - this.VersionID = VersionID - this.MatchETag = MatchETag - this.NoMatchETag = NoMatchETag - this.MatchModifiedSince = MatchModifiedSince - this.MatchUnmodifiedSince = MatchUnmodifiedSince - this.MatchRange = MatchRange - this.Start = Start - this.End = End - this.Encryption = Encryption - } - - validate() { - if (!isValidBucketName(this.Bucket)) { - throw new errors.InvalidBucketNameError('Invalid Source bucket name: ' + this.Bucket) - } - if (!isValidObjectName(this.Object)) { - throw new errors.InvalidObjectNameError(`Invalid Source object name: ${this.Object}`) - } - if ((this.MatchRange && this.Start !== -1 && this.End !== -1 && this.Start > this.End) || this.Start < 0) { - throw new errors.InvalidObjectNameError('Source start must be non-negative, and start must be at most end.') - } else if ((this.MatchRange && !isNumber(this.Start)) || !isNumber(this.End)) { - throw new errors.InvalidObjectNameError( - 'MatchRange is specified. But Invalid Start and End values are specified. 
', - ) - } - - return true - } - - getHeaders() { - let headerOptions = {} - headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) - - if (!_.isEmpty(this.VersionID)) { - headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) + '?versionId=' + this.VersionID - } - - if (!_.isEmpty(this.MatchETag)) { - headerOptions['x-amz-copy-source-if-match'] = this.MatchETag - } - if (!_.isEmpty(this.NoMatchETag)) { - headerOptions['x-amz-copy-source-if-none-match'] = this.NoMatchETag - } - - if (!_.isEmpty(this.MatchModifiedSince)) { - headerOptions['x-amz-copy-source-if-modified-since'] = this.MatchModifiedSince - } - if (!_.isEmpty(this.MatchUnmodifiedSince)) { - headerOptions['x-amz-copy-source-if-unmodified-since'] = this.MatchUnmodifiedSince - } - - return headerOptions - } -} - -export class CopyDestinationOptions { - /* - * @param Bucket __string__ - * @param Object __string__ Object Name for the destination (composed/copied) object defaults - * @param Encryption __object__ Encryption configuration defaults to {} - * @param UserMetadata __object__ - * @param UserTags __object__ | __string__ - * @param LegalHold __string__ ON | OFF - * @param RetainUntilDate __string__ UTC Date String - * @param Mode - */ - constructor({ - Bucket = '', - Object = '', - Encryption = null, - UserMetadata = null, - UserTags = null, - LegalHold = null, - RetainUntilDate = null, - Mode = null, // - }) { - this.Bucket = Bucket - this.Object = Object - this.Encryption = Encryption - this.UserMetadata = UserMetadata - this.UserTags = UserTags - this.LegalHold = LegalHold - this.Mode = Mode // retention mode - this.RetainUntilDate = RetainUntilDate - } - - getHeaders() { - const replaceDirective = 'REPLACE' - const headerOptions = {} - - const userTags = this.UserTags - if (!_.isEmpty(userTags)) { - headerOptions['X-Amz-Tagging-Directive'] = replaceDirective - headerOptions['X-Amz-Tagging'] = isObject(userTags) - ? 
querystring.stringify(userTags) - : isString(userTags) - ? userTags - : '' - } - - if (!_.isEmpty(this.Mode)) { - headerOptions['X-Amz-Object-Lock-Mode'] = this.Mode // GOVERNANCE or COMPLIANCE - } - - if (!_.isEmpty(this.RetainUntilDate)) { - headerOptions['X-Amz-Object-Lock-Retain-Until-Date'] = this.RetainUntilDate // needs to be UTC. - } - - if (!_.isEmpty(this.LegalHold)) { - headerOptions['X-Amz-Object-Lock-Legal-Hold'] = this.LegalHold // ON or OFF - } - - if (!_.isEmpty(this.UserMetadata)) { - const headerKeys = Object.keys(this.UserMetadata) - headerKeys.forEach((key) => { - headerOptions[`X-Amz-Meta-${key}`] = this.UserMetadata[key] - }) - } - - if (!_.isEmpty(this.Encryption)) { - const encryptionHeaders = getEncryptionHeaders(this.Encryption) - Object.keys(encryptionHeaders).forEach((key) => { - headerOptions[key] = encryptionHeaders[key] - }) - } - return headerOptions - } - validate() { - if (!isValidBucketName(this.Bucket)) { - throw new errors.InvalidBucketNameError('Invalid Destination bucket name: ' + this.Bucket) - } - if (!isValidObjectName(this.Object)) { - throw new errors.InvalidObjectNameError(`Invalid Destination object name: ${this.Object}`) - } - if (!_.isEmpty(this.UserMetadata) && !isObject(this.UserMetadata)) { - throw new errors.InvalidObjectNameError(`Destination UserMetadata should be an object with key value pairs`) - } - - if (!_.isEmpty(this.Mode) && ![RETENTION_MODES.GOVERNANCE, RETENTION_MODES.COMPLIANCE].includes(this.Mode)) { - throw new errors.InvalidObjectNameError( - `Invalid Mode specified for destination object it should be one of [GOVERNANCE,COMPLIANCE]`, - ) - } - - if (!_.isEmpty(this.Encryption) && _.isEmpty(this.Encryption)) { - throw new errors.InvalidObjectNameError(`Invalid Encryption configuration for destination object `) - } - return true - } -} - -export const partsRequired = (size) => { - let maxPartSize = PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE / (PART_CONSTRAINTS.MAX_PARTS_COUNT - 1) - let 
requiredPartSize = size / maxPartSize - if (size % maxPartSize > 0) { - requiredPartSize++ - } - requiredPartSize = Math.trunc(requiredPartSize) - return requiredPartSize -} - -// calculateEvenSplits - computes splits for a source and returns -// start and end index slices. Splits happen evenly to be sure that no -// part is less than 5MiB, as that could fail the multipart request if -// it is not the last part. - -let startIndexParts = [] -let endIndexParts = [] -export function calculateEvenSplits(size, objInfo) { - if (size === 0) { - return null - } - const reqParts = partsRequired(size) - startIndexParts = new Array(reqParts) - endIndexParts = new Array(reqParts) - - let start = objInfo.Start - if (_.isEmpty(objInfo.Start) || start === -1) { - start = 0 - } - const divisorValue = Math.trunc(size / reqParts) - - const reminderValue = size % reqParts - - let nextStart = start - - for (let i = 0; i < reqParts; i++) { - let curPartSize = divisorValue - if (i < reminderValue) { - curPartSize++ - } - - const currentStart = nextStart - let currentEnd = currentStart + curPartSize - 1 - nextStart = currentEnd + 1 - - startIndexParts[i] = currentStart - endIndexParts[i] = currentEnd - } - - return { startIndex: startIndexParts, endIndex: endIndexParts, objInfo: objInfo } -} - -export function removeDirAndFiles(dirPath, removeSelf) { - if (removeSelf === undefined) { - removeSelf = true - } - try { - var files = fs.readdirSync(dirPath) - } catch (e) { - return - } - if (files.length > 0) { - for (var i = 0; i < files.length; i++) { - var filePath = path.join(dirPath, files[i]) - if (fs.statSync(filePath).isFile()) { - fs.unlinkSync(filePath) - } else { - removeDirAndFiles(filePath) - } - } - } - if (removeSelf) { - fs.rmdirSync(dirPath) - } -} - -export const parseXml = (xml) => { - let result = null - result = fxp.parse(xml) - if (result.Error) { - throw result.Error - } - - return result -} - -export class SelectResults { - constructor({ - records, // parsed data as 
stream - response, // original response stream - stats, // stats as xml - progress, // stats as xml - }) { - this.records = records - this.response = response - this.stats = stats - this.progress = progress - } - - setStats(stats) { - this.stats = stats - } - getStats() { - return this.stats - } - - setProgress(progress) { - this.progress = progress - } - getProgress() { - return this.progress - } - - setResponse(response) { - this.response = response - } - getResponse() { - return this.response - } - - setRecords(records) { - this.records = records - } - - getRecords() { - return this.records - } -} - -export const DEFAULT_REGION = 'us-east-1' diff --git a/src/main/minio.js b/src/main/minio.js deleted file mode 100644 index 253973f7..00000000 --- a/src/main/minio.js +++ /dev/null @@ -1,3985 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import async from 'async' -import BlockStream2 from 'block-stream2' -import fs from 'fs' -import Http from 'http' -import Https from 'https' -import _ from 'lodash' -import mkdirp from 'mkdirp' -import path from 'path' -import querystring from 'query-string' -import Stream from 'stream' -import { TextEncoder } from 'web-encoding' -import Xml from 'xml' -import xml2js from 'xml2js' - -import CredentialProvider from './CredentialProvider' -import * as errors from './errors.js' -import extensions from './extensions' -import { - calculateEvenSplits, - CopyDestinationOptions, - CopySourceOptions, - DEFAULT_REGION, - extractMetadata, - getScope, - getSourceVersionId, - getVersionId, - insertContentType, - isAmazonEndpoint, - isArray, - isBoolean, - isFunction, - isNumber, - isObject, - isReadableStream, - isString, - isValidBucketName, - isValidDate, - isValidEndpoint, - isValidObjectName, - isValidPort, - isValidPrefix, - isVirtualHostStyle, - LEGAL_HOLD_STATUS, - makeDateLong, - PART_CONSTRAINTS, - partsRequired, - pipesetup, - prependXAMZMeta, - promisify, - readableStream, - RETENTION_MODES, - RETENTION_VALIDITY_UNITS, - sanitizeETag, - toMd5, - toSha256, - uriEscape, - uriResourceEscape, -} from './helpers.js' -import { NotificationConfig, NotificationPoller } from './notification' -import ObjectUploader from './object-uploader' -import { getS3Endpoint } from './s3-endpoints.js' -import { postPresignSignatureV4, presignSignatureV4, signV4 } from './signing.js' -import * as transformers from './transformers' -import { parseSelectObjectContentResponse } from './xml-parsers' - -var Package = require('../../package.json') - -export class Client { - constructor(params) { - if (typeof params.secure !== 'undefined') { - throw new Error('"secure" option deprecated, "useSSL" should be used instead') - } - // Default values if not specified. 
- if (typeof params.useSSL === 'undefined') { - params.useSSL = true - } - if (!params.port) { - params.port = 0 - } - // Validate input params. - if (!isValidEndpoint(params.endPoint)) { - throw new errors.InvalidEndpointError(`Invalid endPoint : ${params.endPoint}`) - } - if (!isValidPort(params.port)) { - throw new errors.InvalidArgumentError(`Invalid port : ${params.port}`) - } - if (!isBoolean(params.useSSL)) { - throw new errors.InvalidArgumentError( - `Invalid useSSL flag type : ${params.useSSL}, expected to be of type "boolean"`, - ) - } - - // Validate region only if its set. - if (params.region) { - if (!isString(params.region)) { - throw new errors.InvalidArgumentError(`Invalid region : ${params.region}`) - } - } - - var host = params.endPoint.toLowerCase() - var port = params.port - var protocol = '' - var transport - var transportAgent - // Validate if configuration is not using SSL - // for constructing relevant endpoints. - if (params.useSSL === false) { - transport = Http - protocol = 'http:' - if (port === 0) { - port = 80 - } - transportAgent = Http.globalAgent - } else { - // Defaults to secure. - transport = Https - protocol = 'https:' - if (port === 0) { - port = 443 - } - transportAgent = Https.globalAgent - } - - // if custom transport is set, use it. - if (params.transport) { - if (!isObject(params.transport)) { - throw new errors.InvalidArgumentError( - `Invalid transport type : ${params.transport}, expected to be type "object"`, - ) - } - transport = params.transport - } - - // if custom transport agent is set, use it. - if (params.transportAgent) { - if (!isObject(params.transportAgent)) { - throw new errors.InvalidArgumentError( - `Invalid transportAgent type: ${params.transportAgent}, expected to be type "object"`, - ) - } - - transportAgent = params.transportAgent - } - - // User Agent should always following the below style. - // Please open an issue to discuss any new changes here. 
- // - // MinIO (OS; ARCH) LIB/VER APP/VER - // - var libraryComments = `(${process.platform}; ${process.arch})` - var libraryAgent = `MinIO ${libraryComments} minio-js/${Package.version}` - // User agent block ends. - - this.transport = transport - this.transportAgent = transportAgent - this.host = host - this.port = port - this.protocol = protocol - this.accessKey = params.accessKey - this.secretKey = params.secretKey - this.sessionToken = params.sessionToken - this.userAgent = `${libraryAgent}` - - // Default path style is true - if (params.pathStyle === undefined) { - this.pathStyle = true - } else { - this.pathStyle = params.pathStyle - } - - if (!this.accessKey) { - this.accessKey = '' - } - if (!this.secretKey) { - this.secretKey = '' - } - this.anonymous = !this.accessKey || !this.secretKey - - if (params.credentialsProvider) { - this.credentialsProvider = params.credentialsProvider - this.checkAndRefreshCreds() - } - - this.regionMap = {} - if (params.region) { - this.region = params.region - } - - this.partSize = 64 * 1024 * 1024 - if (params.partSize) { - this.partSize = params.partSize - this.overRidePartSize = true - } - if (this.partSize < 5 * 1024 * 1024) { - throw new errors.InvalidArgumentError(`Part size should be greater than 5MB`) - } - if (this.partSize > 5 * 1024 * 1024 * 1024) { - throw new errors.InvalidArgumentError(`Part size should be less than 5GB`) - } - - this.maximumPartSize = 5 * 1024 * 1024 * 1024 - this.maxObjectSize = 5 * 1024 * 1024 * 1024 * 1024 - // SHA256 is enabled only for authenticated http requests. If the request is authenticated - // and the connection is https we use x-amz-content-sha256=UNSIGNED-PAYLOAD - // header for signature calculation. - this.enableSHA256 = !this.anonymous && !params.useSSL - - this.s3AccelerateEndpoint = params.s3AccelerateEndpoint || null - this.reqOptions = {} - } - - // This is s3 Specific and does not hold validity in any other Object storage. 
- getAccelerateEndPointIfSet(bucketName, objectName) { - if (!_.isEmpty(this.s3AccelerateEndpoint) && !_.isEmpty(bucketName) && !_.isEmpty(objectName)) { - // http://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration.html - // Disable transfer acceleration for non-compliant bucket names. - if (bucketName.indexOf('.') !== -1) { - throw new Error(`Transfer Acceleration is not supported for non compliant bucket:${bucketName}`) - } - // If transfer acceleration is requested set new host. - // For more details about enabling transfer acceleration read here. - // http://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration.html - return this.s3AccelerateEndpoint - } - return false - } - - /** - * @param endPoint _string_ valid S3 acceleration end point - */ - setS3TransferAccelerate(endPoint) { - this.s3AccelerateEndpoint = endPoint - } - - // Sets the supported request options. - setRequestOptions(options) { - if (!isObject(options)) { - throw new TypeError('request options should be of type "object"') - } - this.reqOptions = _.pick(options, [ - 'agent', - 'ca', - 'cert', - 'ciphers', - 'clientCertEngine', - 'crl', - 'dhparam', - 'ecdhCurve', - 'family', - 'honorCipherOrder', - 'key', - 'passphrase', - 'pfx', - 'rejectUnauthorized', - 'secureOptions', - 'secureProtocol', - 'servername', - 'sessionIdContext', - ]) - } - - // returns *options* object that can be used with http.request() - // Takes care of constructing virtual-host-style or path-style hostname - getRequestOptions(opts) { - var method = opts.method - var region = opts.region - var bucketName = opts.bucketName - var objectName = opts.objectName - var headers = opts.headers - var query = opts.query - - var reqOptions = { method } - reqOptions.headers = {} - - // If custom transportAgent was supplied earlier, we'll inject it here - reqOptions.agent = this.transportAgent - - // Verify if virtual host supported. 
- var virtualHostStyle - if (bucketName) { - virtualHostStyle = isVirtualHostStyle(this.host, this.protocol, bucketName, this.pathStyle) - } - - if (this.port) { - reqOptions.port = this.port - } - reqOptions.protocol = this.protocol - - if (objectName) { - objectName = `${uriResourceEscape(objectName)}` - } - - reqOptions.path = '/' - - // Save host. - reqOptions.host = this.host - // For Amazon S3 endpoint, get endpoint based on region. - if (isAmazonEndpoint(reqOptions.host)) { - const accelerateEndPoint = this.getAccelerateEndPointIfSet(bucketName, objectName) - if (accelerateEndPoint) { - reqOptions.host = `${accelerateEndPoint}` - } else { - reqOptions.host = getS3Endpoint(region) - } - } - - if (virtualHostStyle && !opts.pathStyle) { - // For all hosts which support virtual host style, `bucketName` - // is part of the hostname in the following format: - // - // var host = 'bucketName.example.com' - // - if (bucketName) { - reqOptions.host = `${bucketName}.${reqOptions.host}` - } - if (objectName) { - reqOptions.path = `/${objectName}` - } - } else { - // For all S3 compatible storage services we will fallback to - // path style requests, where `bucketName` is part of the URI - // path. 
- if (bucketName) { - reqOptions.path = `/${bucketName}` - } - if (objectName) { - reqOptions.path = `/${bucketName}/${objectName}` - } - } - - if (query) { - reqOptions.path += `?${query}` - } - reqOptions.headers.host = reqOptions.host - if ( - (reqOptions.protocol === 'http:' && reqOptions.port !== 80) || - (reqOptions.protocol === 'https:' && reqOptions.port !== 443) - ) { - reqOptions.headers.host = `${reqOptions.host}:${reqOptions.port}` - } - reqOptions.headers['user-agent'] = this.userAgent - if (headers) { - // have all header keys in lower case - to make signing easy - _.map(headers, (v, k) => (reqOptions.headers[k.toLowerCase()] = v)) - } - - // Use any request option specified in minioClient.setRequestOptions() - reqOptions = Object.assign({}, this.reqOptions, reqOptions) - - return reqOptions - } - - // Set application specific information. - // - // Generates User-Agent in the following style. - // - // MinIO (OS; ARCH) LIB/VER APP/VER - // - // __Arguments__ - // * `appName` _string_ - Application name. - // * `appVersion` _string_ - Application version. - setAppInfo(appName, appVersion) { - if (!isString(appName)) { - throw new TypeError(`Invalid appName: ${appName}`) - } - if (appName.trim() === '') { - throw new errors.InvalidArgumentError('Input appName cannot be empty.') - } - if (!isString(appVersion)) { - throw new TypeError(`Invalid appVersion: ${appVersion}`) - } - if (appVersion.trim() === '') { - throw new errors.InvalidArgumentError('Input appVersion cannot be empty.') - } - this.userAgent = `${this.userAgent} ${appName}/${appVersion}` - } - - // Calculate part size given the object size. 
Part size will be atleast this.partSize - calculatePartSize(size) { - if (!isNumber(size)) { - throw new TypeError('size should be of type "number"') - } - if (size > this.maxObjectSize) { - throw new TypeError(`size should not be more than ${this.maxObjectSize}`) - } - if (this.overRidePartSize) { - return this.partSize - } - var partSize = this.partSize - for (;;) { - // while(true) {...} throws linting error. - // If partSize is big enough to accomodate the object size, then use it. - if (partSize * 10000 > size) { - return partSize - } - // Try part sizes as 64MB, 80MB, 96MB etc. - partSize += 16 * 1024 * 1024 - } - } - - // log the request, response, error - logHTTP(reqOptions, response, err) { - // if no logstreamer available return. - if (!this.logStream) { - return - } - if (!isObject(reqOptions)) { - throw new TypeError('reqOptions should be of type "object"') - } - if (response && !isReadableStream(response)) { - throw new TypeError('response should be of type "Stream"') - } - if (err && !(err instanceof Error)) { - throw new TypeError('err should be of type "Error"') - } - var logHeaders = (headers) => { - _.forEach(headers, (v, k) => { - if (k == 'authorization') { - var redacter = new RegExp('Signature=([0-9a-f]+)') - v = v.replace(redacter, 'Signature=**REDACTED**') - } - this.logStream.write(`${k}: ${v}\n`) - }) - this.logStream.write('\n') - } - this.logStream.write(`REQUEST: ${reqOptions.method} ${reqOptions.path}\n`) - logHeaders(reqOptions.headers) - if (response) { - this.logStream.write(`RESPONSE: ${response.statusCode}\n`) - logHeaders(response.headers) - } - if (err) { - this.logStream.write('ERROR BODY:\n') - var errJSON = JSON.stringify(err, null, '\t') - this.logStream.write(`${errJSON}\n`) - } - } - - // Enable tracing - traceOn(stream) { - if (!stream) { - stream = process.stdout - } - this.logStream = stream - } - - // Disable tracing - traceOff() { - this.logStream = null - } - - // makeRequest is the primitive used by the apis for 
making S3 requests. - // payload can be empty string in case of no payload. - // statusCode is the expected statusCode. If response.statusCode does not match - // we parse the XML error and call the callback with the error message. - // A valid region is passed by the calls - listBuckets, makeBucket and - // getBucketRegion. - makeRequest(options, payload, statusCodes, region, returnResponse, cb) { - if (!isObject(options)) { - throw new TypeError('options should be of type "object"') - } - if (!isString(payload) && !isObject(payload)) { - // Buffer is of type 'object' - throw new TypeError('payload should be of type "string" or "Buffer"') - } - statusCodes.forEach((statusCode) => { - if (!isNumber(statusCode)) { - throw new TypeError('statusCode should be of type "number"') - } - }) - if (!isString(region)) { - throw new TypeError('region should be of type "string"') - } - if (!isBoolean(returnResponse)) { - throw new TypeError('returnResponse should be of type "boolean"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - if (!options.headers) { - options.headers = {} - } - if (options.method === 'POST' || options.method === 'PUT' || options.method === 'DELETE') { - options.headers['content-length'] = payload.length - } - var sha256sum = '' - if (this.enableSHA256) { - sha256sum = toSha256(payload) - } - var stream = readableStream(payload) - this.makeRequestStream(options, stream, sha256sum, statusCodes, region, returnResponse, cb) - } - - // makeRequestStream will be used directly instead of makeRequest in case the payload - // is available as a stream. for ex. 
putObject - makeRequestStream(options, stream, sha256sum, statusCodes, region, returnResponse, cb) { - if (!isObject(options)) { - throw new TypeError('options should be of type "object"') - } - if (!isReadableStream(stream)) { - throw new errors.InvalidArgumentError('stream should be a readable Stream') - } - if (!isString(sha256sum)) { - throw new TypeError('sha256sum should be of type "string"') - } - statusCodes.forEach((statusCode) => { - if (!isNumber(statusCode)) { - throw new TypeError('statusCode should be of type "number"') - } - }) - if (!isString(region)) { - throw new TypeError('region should be of type "string"') - } - if (!isBoolean(returnResponse)) { - throw new TypeError('returnResponse should be of type "boolean"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - // sha256sum will be empty for anonymous or https requests - if (!this.enableSHA256 && sha256sum.length !== 0) { - throw new errors.InvalidArgumentError(`sha256sum expected to be empty for anonymous or https requests`) - } - // sha256sum should be valid for non-anonymous http requests. - if (this.enableSHA256 && sha256sum.length !== 64) { - throw new errors.InvalidArgumentError(`Invalid sha256sum : ${sha256sum}`) - } - - var _makeRequest = (e, region) => { - if (e) { - return cb(e) - } - options.region = region - var reqOptions = this.getRequestOptions(options) - if (!this.anonymous) { - // For non-anonymous https requests sha256sum is 'UNSIGNED-PAYLOAD' for signature calculation. 
- if (!this.enableSHA256) { - sha256sum = 'UNSIGNED-PAYLOAD' - } - - let date = new Date() - - reqOptions.headers['x-amz-date'] = makeDateLong(date) - reqOptions.headers['x-amz-content-sha256'] = sha256sum - if (this.sessionToken) { - reqOptions.headers['x-amz-security-token'] = this.sessionToken - } - - this.checkAndRefreshCreds() - var authorization = signV4(reqOptions, this.accessKey, this.secretKey, region, date) - reqOptions.headers.authorization = authorization - } - var req = this.transport.request(reqOptions, (response) => { - if (!statusCodes.includes(response.statusCode)) { - // For an incorrect region, S3 server always sends back 400. - // But we will do cache invalidation for all errors so that, - // in future, if AWS S3 decides to send a different status code or - // XML error code we will still work fine. - delete this.regionMap[options.bucketName] - var errorTransformer = transformers.getErrorTransformer(response) - pipesetup(response, errorTransformer).on('error', (e) => { - this.logHTTP(reqOptions, response, e) - cb(e) - }) - return - } - this.logHTTP(reqOptions, response) - if (returnResponse) { - return cb(null, response) - } - // We drain the socket so that the connection gets closed. Note that this - // is not expensive as the socket will not have any data. - response.on('data', () => {}) - cb(null) - }) - let pipe = pipesetup(stream, req) - pipe.on('error', (e) => { - this.logHTTP(reqOptions, null, e) - cb(e) - }) - } - if (region) { - return _makeRequest(null, region) - } - this.getBucketRegion(options.bucketName, _makeRequest) - } - - // gets the region of the bucket - getBucketRegion(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError(`Invalid bucket name : ${bucketName}`) - } - if (!isFunction(cb)) { - throw new TypeError('cb should be of type "function"') - } - - // Region is set with constructor, return the region right here. 
- if (this.region) { - return cb(null, this.region) - } - - if (this.regionMap[bucketName]) { - return cb(null, this.regionMap[bucketName]) - } - var extractRegion = (response) => { - var transformer = transformers.getBucketRegionTransformer() - var region = DEFAULT_REGION - pipesetup(response, transformer) - .on('error', cb) - .on('data', (data) => { - if (data) { - region = data - } - }) - .on('end', () => { - this.regionMap[bucketName] = region - cb(null, region) - }) - } - - var method = 'GET' - var query = 'location' - - // `getBucketLocation` behaves differently in following ways for - // different environments. - // - // - For nodejs env we default to path style requests. - // - For browser env path style requests on buckets yields CORS - // error. To circumvent this problem we make a virtual host - // style request signed with 'us-east-1'. This request fails - // with an error 'AuthorizationHeaderMalformed', additionally - // the error XML also provides Region of the bucket. To validate - // this region is proper we retry the same request with the newly - // obtained region. - var pathStyle = this.pathStyle && typeof window === 'undefined' - - this.makeRequest({ method, bucketName, query, pathStyle }, '', [200], DEFAULT_REGION, true, (e, response) => { - if (e) { - if (e.name === 'AuthorizationHeaderMalformed') { - var region = e.Region - if (!region) { - return cb(e) - } - this.makeRequest({ method, bucketName, query }, '', [200], region, true, (e, response) => { - if (e) { - return cb(e) - } - extractRegion(response) - }) - return - } - return cb(e) - } - extractRegion(response) - }) - } - - // Creates the bucket `bucketName`. - // - // __Arguments__ - // * `bucketName` _string_ - Name of the bucket - // * `region` _string_ - region valid values are _us-west-1_, _us-west-2_, _eu-west-1_, _eu-central-1_, _ap-southeast-1_, _ap-northeast-1_, _ap-southeast-2_, _sa-east-1_. - // * `makeOpts` _object_ - Options to create a bucket. 
e.g {ObjectLocking:true} (Optional) - // * `callback(err)` _function_ - callback function with `err` as the error argument. `err` is null if the bucket is successfully created. - makeBucket(bucketName, region, makeOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - // Backward Compatibility - if (isObject(region)) { - cb = makeOpts - makeOpts = region - region = '' - } - if (isFunction(region)) { - cb = region - region = '' - makeOpts = {} - } - if (isFunction(makeOpts)) { - cb = makeOpts - makeOpts = {} - } - - if (!isString(region)) { - throw new TypeError('region should be of type "string"') - } - if (!isObject(makeOpts)) { - throw new TypeError('makeOpts should be of type "object"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - var payload = '' - - // Region already set in constructor, validate if - // caller requested bucket location is same. - if (region && this.region) { - if (region !== this.region) { - throw new errors.InvalidArgumentError(`Configured region ${this.region}, requested ${region}`) - } - } - // sending makeBucket request with XML containing 'us-east-1' fails. 
For - // default region server expects the request without body - if (region && region !== DEFAULT_REGION) { - var createBucketConfiguration = [] - createBucketConfiguration.push({ - _attr: { - xmlns: 'http://s3.amazonaws.com/doc/2006-03-01/', - }, - }) - createBucketConfiguration.push({ - LocationConstraint: region, - }) - var payloadObject = { - CreateBucketConfiguration: createBucketConfiguration, - } - payload = Xml(payloadObject) - } - var method = 'PUT' - var headers = {} - - if (makeOpts.ObjectLocking) { - headers['x-amz-bucket-object-lock-enabled'] = true - } - - if (!region) { - region = DEFAULT_REGION - } - - const processWithRetry = (err) => { - if (err && (region === '' || region === DEFAULT_REGION)) { - if (err.code === 'AuthorizationHeaderMalformed' && err.region !== '') { - // Retry with region returned as part of error - this.makeRequest({ method, bucketName, headers }, payload, [200], err.region, false, cb) - } else { - return cb && cb(err) - } - } - return cb && cb(err) - } - this.makeRequest({ method, bucketName, headers }, payload, [200], region, false, processWithRetry) - } - - // List of buckets created. - // - // __Arguments__ - // * `callback(err, buckets)` _function_ - callback function with error as the first argument. 
`buckets` is an array of bucket information - // - // `buckets` array element: - // * `bucket.name` _string_ : bucket name - // * `bucket.creationDate` _Date_: date when bucket was created - listBuckets(cb) { - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - var method = 'GET' - this.makeRequest({ method }, '', [200], DEFAULT_REGION, true, (e, response) => { - if (e) { - return cb(e) - } - var transformer = transformers.getListBucketTransformer() - var buckets - pipesetup(response, transformer) - .on('data', (result) => (buckets = result)) - .on('error', (e) => cb(e)) - .on('end', () => cb(null, buckets)) - }) - } - - // Returns a stream that emits objects that are partially uploaded. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `prefix` _string_: prefix of the object names that are partially uploaded (optional, default `''`) - // * `recursive` _bool_: directory style listing when false, recursive listing when true (optional, default `false`) - // - // __Return Value__ - // * `stream` _Stream_ : emits objects of the format: - // * `object.key` _string_: name of the object - // * `object.uploadId` _string_: upload ID of the object - // * `object.size` _Integer_: size of the partially uploaded object - listIncompleteUploads(bucket, prefix, recursive) { - if (prefix === undefined) { - prefix = '' - } - if (recursive === undefined) { - recursive = false - } - if (!isValidBucketName(bucket)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucket) - } - if (!isValidPrefix(prefix)) { - throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) - } - if (!isBoolean(recursive)) { - throw new TypeError('recursive should be of type "boolean"') - } - var delimiter = recursive ? 
'' : '/' - var keyMarker = '' - var uploadIdMarker = '' - var uploads = [] - var ended = false - var readStream = Stream.Readable({ objectMode: true }) - readStream._read = () => { - // push one upload info per _read() - if (uploads.length) { - return readStream.push(uploads.shift()) - } - if (ended) { - return readStream.push(null) - } - this.listIncompleteUploadsQuery(bucket, prefix, keyMarker, uploadIdMarker, delimiter) - .on('error', (e) => readStream.emit('error', e)) - .on('data', (result) => { - result.prefixes.forEach((prefix) => uploads.push(prefix)) - async.eachSeries( - result.uploads, - (upload, cb) => { - // for each incomplete upload add the sizes of its uploaded parts - this.listParts(bucket, upload.key, upload.uploadId, (err, parts) => { - if (err) { - return cb(err) - } - upload.size = parts.reduce((acc, item) => acc + item.size, 0) - uploads.push(upload) - cb() - }) - }, - (err) => { - if (err) { - readStream.emit('error', err) - return - } - if (result.isTruncated) { - keyMarker = result.nextKeyMarker - uploadIdMarker = result.nextUploadIdMarker - } else { - ended = true - } - readStream._read() - }, - ) - }) - } - return readStream - } - - // To check if a bucket already exists. - // - // __Arguments__ - // * `bucketName` _string_ : name of the bucket - // * `callback(err)` _function_ : `err` is `null` if the bucket exists - bucketExists(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - var method = 'HEAD' - this.makeRequest({ method, bucketName }, '', [200], '', false, (err) => { - if (err) { - if (err.code == 'NoSuchBucket' || err.code == 'NotFound') { - return cb(null, false) - } - return cb(err) - } - cb(null, true) - }) - } - - // Remove a bucket. 
- // - // __Arguments__ - // * `bucketName` _string_ : name of the bucket - // * `callback(err)` _function_ : `err` is `null` if the bucket is removed successfully. - removeBucket(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - var method = 'DELETE' - this.makeRequest({ method, bucketName }, '', [204], '', false, (e) => { - // If the bucket was successfully removed, remove the region map entry. - if (!e) { - delete this.regionMap[bucketName] - } - cb(e) - }) - } - - // Remove the partially uploaded object. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `callback(err)` _function_: callback function is called with non `null` value in case of error - removeIncompleteUpload(bucketName, objectName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.isValidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - var removeUploadId - async.during( - (cb) => { - this.findUploadId(bucketName, objectName, (e, uploadId) => { - if (e) { - return cb(e) - } - removeUploadId = uploadId - cb(null, uploadId) - }) - }, - (cb) => { - var method = 'DELETE' - var query = `uploadId=${removeUploadId}` - this.makeRequest({ method, bucketName, objectName, query }, '', [204], '', false, (e) => cb(e)) - }, - cb, - ) - } - - // Callback is called with `error` in case of error or `null` in case of success - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `filePath` _string_: path to which the object data will be written to 
- // * `getOpts` _object_: Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional) - // * `callback(err)` _function_: callback is called with `err` in case of error. - fGetObject(bucketName, objectName, filePath, getOpts = {}, cb) { - // Input validation. - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isString(filePath)) { - throw new TypeError('filePath should be of type "string"') - } - // Backward Compatibility - if (isFunction(getOpts)) { - cb = getOpts - getOpts = {} - } - - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - // Internal data. - var partFile - var partFileStream - var objStat - - // Rename wrapper. - var rename = (err) => { - if (err) { - return cb(err) - } - fs.rename(partFile, filePath, cb) - } - - async.waterfall( - [ - (cb) => this.statObject(bucketName, objectName, getOpts, cb), - (result, cb) => { - objStat = result - // Create any missing top level directories. 
- mkdirp(path.dirname(filePath), cb) - }, - (ignore, cb) => { - partFile = `${filePath}.${objStat.etag}.part.minio` - fs.stat(partFile, (e, stats) => { - var offset = 0 - if (e) { - partFileStream = fs.createWriteStream(partFile, { flags: 'w' }) - } else { - if (objStat.size === stats.size) { - return rename() - } - offset = stats.size - partFileStream = fs.createWriteStream(partFile, { flags: 'a' }) - } - this.getPartialObject(bucketName, objectName, offset, 0, getOpts, cb) - }) - }, - (downloadStream, cb) => { - pipesetup(downloadStream, partFileStream) - .on('error', (e) => cb(e)) - .on('finish', cb) - }, - (cb) => fs.stat(partFile, cb), - (stats, cb) => { - if (stats.size === objStat.size) { - return cb() - } - cb(new Error('Size mismatch between downloaded file and the object')) - }, - ], - rename, - ) - } - - // Callback is called with readable stream of the object content. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `getOpts` _object_: Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional) - // * `callback(err, stream)` _function_: callback is called with `err` in case of error. `stream` is the object content stream - getObject(bucketName, objectName, getOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - // Backward Compatibility - if (isFunction(getOpts)) { - cb = getOpts - getOpts = {} - } - - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - this.getPartialObject(bucketName, objectName, 0, 0, getOpts, cb) - } - - // Callback is called with readable stream of the partial object content. 
- // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `offset` _number_: offset of the object from where the stream will start - // * `length` _number_: length of the object that will be read in the stream (optional, if not specified we read the rest of the file from the offset) - // * `getOpts` _object_: Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional) - // * `callback(err, stream)` _function_: callback is called with `err` in case of error. `stream` is the object content stream - getPartialObject(bucketName, objectName, offset, length, getOpts = {}, cb) { - if (isFunction(length)) { - cb = length - length = 0 - } - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isNumber(offset)) { - throw new TypeError('offset should be of type "number"') - } - if (!isNumber(length)) { - throw new TypeError('length should be of type "number"') - } - // Backward Compatibility - if (isFunction(getOpts)) { - cb = getOpts - getOpts = {} - } - - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - var range = '' - if (offset || length) { - if (offset) { - range = `bytes=${+offset}-` - } else { - range = 'bytes=0-' - offset = 0 - } - if (length) { - range += `${+length + offset - 1}` - } - } - - var headers = {} - if (range !== '') { - headers.range = range - } - - var expectedStatusCodes = [200] - if (range) { - expectedStatusCodes.push(206) - } - var method = 'GET' - - var query = querystring.stringify(getOpts) - this.makeRequest({ method, bucketName, objectName, headers, query }, '', expectedStatusCodes, '', true, cb) - } - - // Uploads the object using contents from a file - // - // __Arguments__ - // * `bucketName` _string_: 
// name of the bucket
// * `objectName` _string_: name of the object
// * `filePath` _string_: file path of the file to be uploaded
// * `metaData` _Javascript Object_: metaData assosciated with the object
// * `callback(err, objInfo)` _function_: non null `err` indicates error, `objInfo` _object_ which contains versionId and etag.
fPutObject(bucketName, objectName, filePath, metaData, callback) {
  if (!isValidBucketName(bucketName)) {
    throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
  }
  if (!isValidObjectName(objectName)) {
    throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
  }

  if (!isString(filePath)) {
    throw new TypeError('filePath should be of type "string"')
  }
  if (isFunction(metaData)) {
    callback = metaData
    metaData = {} // Set metaData empty if no metaData provided.
  }
  if (!isObject(metaData)) {
    throw new TypeError('metaData should be of type "object"')
  }

  // Inserts correct `content-type` attribute based on metaData and filePath
  metaData = insertContentType(metaData, filePath)

  // Updates metaData to have the correct prefix if needed
  metaData = prependXAMZMeta(metaData)
  var size
  var partSize

  // Waterfall stages: stat the file -> simple PUT or find a resumable
  // upload id -> list already-uploaded parts -> upload missing parts ->
  // complete the multipart upload. Arrow functions keep `this` bound to
  // the client instance.
  async.waterfall(
    [
      (cb) => fs.stat(filePath, cb),
      (stats, cb) => {
        size = stats.size
        var stream
        var cbTriggered = false
        var origCb = cb
        // Wrap cb so it fires at most once and tears down any open
        // read stream before forwarding to the waterfall.
        cb = function () {
          if (cbTriggered) {
            return
          }
          cbTriggered = true
          if (stream) {
            stream.destroy()
          }
          return origCb.apply(this, arguments)
        }
        if (size > this.maxObjectSize) {
          return cb(new Error(`${filePath} size : ${stats.size}, max allowed size : 5TB`))
        }
        if (size <= this.partSize) {
          // simple PUT request, no multipart
          var multipart = false
          var uploader = this.getUploader(bucketName, objectName, metaData, multipart)
          var hash = transformers.getHashSummer(this.enableSHA256)
          var start = 0
          var end = size - 1
          var autoClose = true
          if (size === 0) {
            end = 0
          }
          var options = { start, end, autoClose }
          pipesetup(fs.createReadStream(filePath, options), hash)
            .on('data', (data) => {
              var md5sum = data.md5sum
              var sha256sum = data.sha256sum
              stream = fs.createReadStream(filePath, options)
              uploader(stream, size, sha256sum, md5sum, (err, objInfo) => {
                callback(err, objInfo)
                // `true` is a sentinel error that short-circuits the
                // waterfall; the final handler ignores it (see below).
                cb(true)
              })
            })
            .on('error', (e) => cb(e))
          return
        }
        this.findUploadId(bucketName, objectName, cb)
      },
      (uploadId, cb) => {
        // if there was a previous incomplete upload, fetch all its uploaded parts info
        if (uploadId) {
          return this.listParts(bucketName, objectName, uploadId, (e, etags) => cb(e, uploadId, etags))
        }
        // there was no previous upload, initiate a new one
        this.initiateNewMultipartUpload(bucketName, objectName, metaData, (e, uploadId) => cb(e, uploadId, []))
      },
      (uploadId, etags, cb) => {
        partSize = this.calculatePartSize(size)
        var multipart = true
        var uploader = this.getUploader(bucketName, objectName, metaData, multipart)

        // convert array to object to make things easy
        var parts = etags.reduce(function (acc, item) {
          if (!acc[item.part]) {
            acc[item.part] = item
          }
          return acc
        }, {})
        var partsDone = []
        var partNumber = 1
        var uploadedSize = 0
        async.whilst(
          (cb) => {
            cb(null, uploadedSize < size)
          },
          (cb) => {
            var stream
            var cbTriggered = false
            var origCb = cb
            // Same single-shot / stream-teardown wrapper as above.
            cb = function () {
              if (cbTriggered) {
                return
              }
              cbTriggered = true
              if (stream) {
                stream.destroy()
              }
              return origCb.apply(this, arguments)
            }
            var part = parts[partNumber]
            var hash = transformers.getHashSummer(this.enableSHA256)
            var length = partSize
            if (length > size - uploadedSize) {
              length = size - uploadedSize
            }
            var start = uploadedSize
            var end = uploadedSize + length - 1
            var autoClose = true
            var options = { autoClose, start, end }
            // verify md5sum of each part
            pipesetup(fs.createReadStream(filePath, options), hash)
              .on('data', (data) => {
                var md5sumHex = Buffer.from(data.md5sum, 'base64').toString('hex')
                if (part && md5sumHex === part.etag) {
                  // md5 matches, chunk already uploaded
                  partsDone.push({ part: partNumber, etag: part.etag })
                  partNumber++
                  uploadedSize += length
                  return cb()
                }
                // part is not uploaded yet, or md5 mismatch
                stream = fs.createReadStream(filePath, options)
                uploader(uploadId, partNumber, stream, length, data.sha256sum, data.md5sum, (e, objInfo) => {
                  if (e) {
                    return cb(e)
                  }
                  partsDone.push({ part: partNumber, etag: objInfo.etag })
                  partNumber++
                  uploadedSize += length
                  return cb()
                })
              })
              .on('error', (e) => cb(e))
          },
          (e) => {
            if (e) {
              return cb(e)
            }
            cb(null, partsDone, uploadId)
          },
        )
      },
      // all parts uploaded, complete the multipart upload
      (etags, uploadId, cb) => this.completeMultipartUpload(bucketName, objectName, uploadId, etags, cb),
    ],
    (err, ...rest) => {
      // `err === true` means the simple-PUT branch already invoked
      // `callback` itself; swallow the sentinel.
      if (err === true) {
        return
      }
      callback(err, ...rest)
    },
  )
}

// Uploads the object.
//
// Uploading a stream
// __Arguments__
// * `bucketName` _string_: name of the bucket
// * `objectName` _string_: name of the object
// * `stream` _Stream_: Readable stream
// * `size` _number_: size of the object (optional)
// * `callback(err, etag)` _function_: non null `err` indicates error, `etag` _string_ is the etag of the object uploaded.
- // - // Uploading "Buffer" or "string" - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `string or Buffer` _string_ or _Buffer_: string or buffer - // * `callback(err, objInfo)` _function_: `err` is `null` in case of success and `info` will have the following object details: - // * `etag` _string_: etag of the object - // * `versionId` _string_: versionId of the object - putObject(bucketName, objectName, stream, size, metaData, callback) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - - // We'll need to shift arguments to the left because of size and metaData. - if (isFunction(size)) { - callback = size - metaData = {} - } else if (isFunction(metaData)) { - callback = metaData - metaData = {} - } - - // We'll need to shift arguments to the left because of metaData - // and size being optional. - if (isObject(size)) { - metaData = size - } - - // Ensures Metadata has appropriate prefix for A3 API - metaData = prependXAMZMeta(metaData) - if (typeof stream === 'string' || stream instanceof Buffer) { - // Adapts the non-stream interface into a stream. - size = stream.length - stream = readableStream(stream) - } else if (!isReadableStream(stream)) { - throw new TypeError('third argument should be of type "stream.Readable" or "Buffer" or "string"') - } - - if (!isFunction(callback)) { - throw new TypeError('callback should be of type "function"') - } - - if (isNumber(size) && size < 0) { - throw new errors.InvalidArgumentError(`size cannot be negative, given size: ${size}`) - } - - // Get the part size and forward that to the BlockStream. Default to the - // largest block size possible if necessary. 
- if (!isNumber(size)) { - size = this.maxObjectSize - } - - size = this.calculatePartSize(size) - - // s3 requires that all non-end chunks be at least `this.partSize`, - // so we chunk the stream until we hit either that size or the end before - // we flush it to s3. - let chunker = new BlockStream2({ size, zeroPadding: false }) - - // This is a Writable stream that can be written to in order to upload - // to the specified bucket and object automatically. - let uploader = new ObjectUploader(this, bucketName, objectName, size, metaData, callback) - // stream => chunker => uploader - pipesetup(stream, chunker, uploader) - } - - // Copy the object. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `srcObject` _string_: path of the source object to be copied - // * `conditions` _CopyConditions_: copy conditions that needs to be satisfied (optional, default `null`) - // * `callback(err, {etag, lastModified})` _function_: non null `err` indicates error, `etag` _string_ and `listModifed` _Date_ are respectively the etag and the last modified date of the newly copied object - copyObjectV1(arg1, arg2, arg3, arg4, arg5) { - var bucketName = arg1 - var objectName = arg2 - var srcObject = arg3 - var conditions, cb - if (typeof arg4 == 'function' && arg5 === undefined) { - conditions = null - cb = arg4 - } else { - conditions = arg4 - cb = arg5 - } - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isString(srcObject)) { - throw new TypeError('srcObject should be of type "string"') - } - if (srcObject === '') { - throw new errors.InvalidPrefixError(`Empty source prefix`) - } - - if (conditions !== null && !(conditions instanceof CopyConditions)) { - throw new TypeError('conditions should be of type 
"CopyConditions"') - } - - var headers = {} - headers['x-amz-copy-source'] = uriResourceEscape(srcObject) - - if (conditions !== null) { - if (conditions.modified !== '') { - headers['x-amz-copy-source-if-modified-since'] = conditions.modified - } - if (conditions.unmodified !== '') { - headers['x-amz-copy-source-if-unmodified-since'] = conditions.unmodified - } - if (conditions.matchETag !== '') { - headers['x-amz-copy-source-if-match'] = conditions.matchETag - } - if (conditions.matchEtagExcept !== '') { - headers['x-amz-copy-source-if-none-match'] = conditions.matchETagExcept - } - } - - var method = 'PUT' - this.makeRequest({ method, bucketName, objectName, headers }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - var transformer = transformers.getCopyObjectTransformer() - pipesetup(response, transformer) - .on('error', (e) => cb(e)) - .on('data', (data) => cb(null, data)) - }) - } - - /** - * Internal Method to perform copy of an object. - * @param sourceConfig __object__ instance of CopySourceOptions @link ./helpers/CopySourceOptions - * @param destConfig __object__ instance of CopyDestinationOptions @link ./helpers/CopyDestinationOptions - * @param cb __function__ called with null if there is an error - * @returns Promise if no callack is passed. 
- */ - copyObjectV2(sourceConfig, destConfig, cb) { - if (!(sourceConfig instanceof CopySourceOptions)) { - throw new errors.InvalidArgumentError('sourceConfig should of type CopySourceOptions ') - } - if (!(destConfig instanceof CopyDestinationOptions)) { - throw new errors.InvalidArgumentError('destConfig should of type CopyDestinationOptions ') - } - if (!destConfig.validate()) { - return false - } - if (!destConfig.validate()) { - return false - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - const headers = Object.assign({}, sourceConfig.getHeaders(), destConfig.getHeaders()) - - const bucketName = destConfig.Bucket - const objectName = destConfig.Object - - const method = 'PUT' - this.makeRequest({ method, bucketName, objectName, headers }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - const transformer = transformers.getCopyObjectTransformer() - pipesetup(response, transformer) - .on('error', (e) => cb(e)) - .on('data', (data) => { - const resHeaders = response.headers - - const copyObjResponse = { - Bucket: destConfig.Bucket, - Key: destConfig.Object, - LastModified: data.LastModified, - MetaData: extractMetadata(resHeaders), - VersionId: getVersionId(resHeaders), - SourceVersionId: getSourceVersionId(resHeaders), - Etag: sanitizeETag(resHeaders.etag), - Size: +resHeaders['content-length'], - } - - return cb(null, copyObjResponse) - }) - }) - } - - // Backward compatibility for Copy Object API. 
- copyObject(...allArgs) { - if (allArgs[0] instanceof CopySourceOptions && allArgs[1] instanceof CopyDestinationOptions) { - return this.copyObjectV2(...arguments) - } - return this.copyObjectV1(...arguments) - } - - // list a batch of objects - listObjectsQuery(bucketName, prefix, marker, listQueryOpts = {}) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isString(prefix)) { - throw new TypeError('prefix should be of type "string"') - } - if (!isString(marker)) { - throw new TypeError('marker should be of type "string"') - } - let { Delimiter, MaxKeys, IncludeVersion } = listQueryOpts - - if (!isObject(listQueryOpts)) { - throw new TypeError('listQueryOpts should be of type "object"') - } - - if (!isString(Delimiter)) { - throw new TypeError('Delimiter should be of type "string"') - } - if (!isNumber(MaxKeys)) { - throw new TypeError('MaxKeys should be of type "number"') - } - - const queries = [] - // escape every value in query string, except maxKeys - queries.push(`prefix=${uriEscape(prefix)}`) - queries.push(`delimiter=${uriEscape(Delimiter)}`) - queries.push(`encoding-type=url`) - - if (IncludeVersion) { - queries.push(`versions`) - } - - if (marker) { - marker = uriEscape(marker) - if (IncludeVersion) { - queries.push(`key-marker=${marker}`) - } else { - queries.push(`marker=${marker}`) - } - } - - // no need to escape maxKeys - if (MaxKeys) { - if (MaxKeys >= 1000) { - MaxKeys = 1000 - } - queries.push(`max-keys=${MaxKeys}`) - } - queries.sort() - var query = '' - if (queries.length > 0) { - query = `${queries.join('&')}` - } - - var method = 'GET' - var transformer = transformers.getListObjectsTransformer() - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return transformer.emit('error', e) - } - pipesetup(response, transformer) - }) - return transformer - } - - // List the objects in the bucket. 
- // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `prefix` _string_: the prefix of the objects that should be listed (optional, default `''`) - // * `recursive` _bool_: `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. (optional, default `false`) - // * `listOpts _object_: query params to list object with below keys - // * listOpts.MaxKeys _int_ maximum number of keys to return - // * listOpts.IncludeVersion _bool_ true|false to include versions. - // __Return Value__ - // * `stream` _Stream_: stream emitting the objects in the bucket, the object is of the format: - // * `obj.name` _string_: name of the object - // * `obj.prefix` _string_: name of the object prefix - // * `obj.size` _number_: size of the object - // * `obj.etag` _string_: etag of the object - // * `obj.lastModified` _Date_: modified time stamp - // * `obj.isDeleteMarker` _boolean_: true if it is a delete marker - // * `obj.versionId` _string_: versionId of the object - listObjects(bucketName, prefix, recursive, listOpts = {}) { - if (prefix === undefined) { - prefix = '' - } - if (recursive === undefined) { - recursive = false - } - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidPrefix(prefix)) { - throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) - } - if (!isString(prefix)) { - throw new TypeError('prefix should be of type "string"') - } - if (!isBoolean(recursive)) { - throw new TypeError('recursive should be of type "boolean"') - } - if (!isObject(listOpts)) { - throw new TypeError('listOpts should be of type "object"') - } - var marker = '' - const listQueryOpts = { - Delimiter: recursive ? 
'' : '/', // if recursive is false set delimiter to '/' - MaxKeys: 1000, - IncludeVersion: listOpts.IncludeVersion, - } - var objects = [] - var ended = false - var readStream = Stream.Readable({ objectMode: true }) - readStream._read = () => { - // push one object per _read() - if (objects.length) { - readStream.push(objects.shift()) - return - } - if (ended) { - return readStream.push(null) - } - // if there are no objects to push do query for the next batch of objects - this.listObjectsQuery(bucketName, prefix, marker, listQueryOpts) - .on('error', (e) => readStream.emit('error', e)) - .on('data', (result) => { - if (result.isTruncated) { - marker = result.nextMarker || result.versionIdMarker - } else { - ended = true - } - objects = result.objects - readStream._read() - }) - } - return readStream - } - - // listObjectsV2Query - (List Objects V2) - List some or all (up to 1000) of the objects in a bucket. - // - // You can use the request parameters as selection criteria to return a subset of the objects in a bucket. - // request parameters :- - // * `bucketName` _string_: name of the bucket - // * `prefix` _string_: Limits the response to keys that begin with the specified prefix. - // * `continuation-token` _string_: Used to continue iterating over a set of objects. - // * `delimiter` _string_: A delimiter is a character you use to group keys. - // * `max-keys` _number_: Sets the maximum number of keys returned in the response body. - // * `start-after` _string_: Specifies the key to start after when listing objects in a bucket. 
- listObjectsV2Query(bucketName, prefix, continuationToken, delimiter, maxKeys, startAfter) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isString(prefix)) { - throw new TypeError('prefix should be of type "string"') - } - if (!isString(continuationToken)) { - throw new TypeError('continuationToken should be of type "string"') - } - if (!isString(delimiter)) { - throw new TypeError('delimiter should be of type "string"') - } - if (!isNumber(maxKeys)) { - throw new TypeError('maxKeys should be of type "number"') - } - if (!isString(startAfter)) { - throw new TypeError('startAfter should be of type "string"') - } - var queries = [] - - // Call for listing objects v2 API - queries.push(`list-type=2`) - queries.push(`encoding-type=url`) - - // escape every value in query string, except maxKeys - queries.push(`prefix=${uriEscape(prefix)}`) - queries.push(`delimiter=${uriEscape(delimiter)}`) - - if (continuationToken) { - continuationToken = uriEscape(continuationToken) - queries.push(`continuation-token=${continuationToken}`) - } - // Set start-after - if (startAfter) { - startAfter = uriEscape(startAfter) - queries.push(`start-after=${startAfter}`) - } - // no need to escape maxKeys - if (maxKeys) { - if (maxKeys >= 1000) { - maxKeys = 1000 - } - queries.push(`max-keys=${maxKeys}`) - } - queries.sort() - var query = '' - if (queries.length > 0) { - query = `${queries.join('&')}` - } - var method = 'GET' - var transformer = transformers.getListObjectsV2Transformer() - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return transformer.emit('error', e) - } - pipesetup(response, transformer) - }) - return transformer - } - - // List the objects in the bucket using S3 ListObjects V2 - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `prefix` _string_: the prefix of the objects that should be listed 
(optional, default `''`) - // * `recursive` _bool_: `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. (optional, default `false`) - // * `startAfter` _string_: Specifies the key to start after when listing objects in a bucket. (optional, default `''`) - // - // __Return Value__ - // * `stream` _Stream_: stream emitting the objects in the bucket, the object is of the format: - // * `obj.name` _string_: name of the object - // * `obj.prefix` _string_: name of the object prefix - // * `obj.size` _number_: size of the object - // * `obj.etag` _string_: etag of the object - // * `obj.lastModified` _Date_: modified time stamp - listObjectsV2(bucketName, prefix, recursive, startAfter) { - if (prefix === undefined) { - prefix = '' - } - if (recursive === undefined) { - recursive = false - } - if (startAfter === undefined) { - startAfter = '' - } - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidPrefix(prefix)) { - throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) - } - if (!isString(prefix)) { - throw new TypeError('prefix should be of type "string"') - } - if (!isBoolean(recursive)) { - throw new TypeError('recursive should be of type "boolean"') - } - if (!isString(startAfter)) { - throw new TypeError('startAfter should be of type "string"') - } - // if recursive is false set delimiter to '/' - var delimiter = recursive ? 
'' : '/' - var continuationToken = '' - var objects = [] - var ended = false - var readStream = Stream.Readable({ objectMode: true }) - readStream._read = () => { - // push one object per _read() - if (objects.length) { - readStream.push(objects.shift()) - return - } - if (ended) { - return readStream.push(null) - } - // if there are no objects to push do query for the next batch of objects - this.listObjectsV2Query(bucketName, prefix, continuationToken, delimiter, 1000, startAfter) - .on('error', (e) => readStream.emit('error', e)) - .on('data', (result) => { - if (result.isTruncated) { - continuationToken = result.nextContinuationToken - } else { - ended = true - } - objects = result.objects - readStream._read() - }) - } - return readStream - } - - // Stat information of the object. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `statOpts` _object_ : Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional). 
- // * `callback(err, stat)` _function_: `err` is not `null` in case of error, `stat` contains the object information: - // * `stat.size` _number_: size of the object - // * `stat.etag` _string_: etag of the object - // * `stat.metaData` _string_: MetaData of the object - // * `stat.lastModified` _Date_: modified time stamp - // * `stat.versionId` _string_: version id of the object if available - statObject(bucketName, objectName, statOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - // backward compatibility - if (isFunction(statOpts)) { - cb = statOpts - statOpts = {} - } - - if (!isObject(statOpts)) { - throw new errors.InvalidArgumentError('statOpts should be of type "object"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - var query = querystring.stringify(statOpts) - var method = 'HEAD' - this.makeRequest({ method, bucketName, objectName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - // We drain the socket so that the connection gets closed. Note that this - // is not expensive as the socket will not have any data. - response.on('data', () => {}) - - const result = { - size: +response.headers['content-length'], - metaData: extractMetadata(response.headers), - lastModified: new Date(response.headers['last-modified']), - versionId: getVersionId(response.headers), - etag: sanitizeETag(response.headers.etag), - } - - cb(null, result) - }) - } - - // Remove the specified object. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `removeOpts` _object_: Version of the object in the form `{versionId:'my-uuid', governanceBypass:true|false, forceDelete:true|false}`. Default is `{}`. 
(optional) - // * `callback(err)` _function_: callback function is called with non `null` value in case of error - removeObject(bucketName, objectName, removeOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - // backward compatibility - if (isFunction(removeOpts)) { - cb = removeOpts - removeOpts = {} - } - - if (!isObject(removeOpts)) { - throw new errors.InvalidArgumentError('removeOpts should be of type "object"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - const method = 'DELETE' - const queryParams = {} - - if (removeOpts.versionId) { - queryParams.versionId = `${removeOpts.versionId}` - } - const headers = {} - if (removeOpts.governanceBypass) { - headers['X-Amz-Bypass-Governance-Retention'] = true - } - if (removeOpts.forceDelete) { - headers['x-minio-force-delete'] = true - } - - const query = querystring.stringify(queryParams) - - let requestOptions = { method, bucketName, objectName, headers } - if (query) { - requestOptions['query'] = query - } - - this.makeRequest(requestOptions, '', [200, 204], '', false, cb) - } - - // Remove all the objects residing in the objectsList. 
//
// __Arguments__
// * `bucketName` _string_: name of the bucket
// * `objectsList` _array_: array of objects of one of the following:
// *         List of Object names as array of strings which are object keys:  ['objectname1','objectname2']
// *         List of Object name and versionId as an object:  [{name:"objectname",versionId:"my-version-id"}]

removeObjects(bucketName, objectsList, cb) {
  if (!isValidBucketName(bucketName)) {
    throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
  }
  if (!isArray(objectsList)) {
    throw new errors.InvalidArgumentError('objectsList should be a list')
  }
  if (!isFunction(cb)) {
    throw new TypeError('callback should be of type "function"')
  }

  // S3 Multi-Object Delete accepts at most 1000 keys per request, so the
  // input is split into batches of `maxEntries`.
  const maxEntries = 1000
  const query = 'delete'
  const method = 'POST'

  // Chunk objectsList into a list of lists, each at most maxEntries long.
  let result = objectsList.reduce(
    (result, entry) => {
      result.list.push(entry)
      if (result.list.length === maxEntries) {
        result.listOfList.push(result.list)
        result.list = []
      }
      return result
    },
    { listOfList: [], list: [] },
  )

  if (result.list.length > 0) {
    result.listOfList.push(result.list)
  }

  const encoder = new TextEncoder()
  const batchResults = []

  // Issue one Multi-Object Delete request per batch, sequentially.
  async.eachSeries(
    result.listOfList,
    (list, batchCb) => {
      var objects = []
      list.forEach(function (value) {
        if (isObject(value)) {
          objects.push({ Key: value.name, VersionId: value.versionId })
        } else {
          objects.push({ Key: value })
        }
      })
      let deleteObjects = { Delete: { Quiet: true, Object: objects } }
      const builder = new xml2js.Builder({ headless: true })
      let payload = builder.buildObject(deleteObjects)
      payload = encoder.encode(payload)
      const headers = {}

      // Content-MD5 is required by the S3 DeleteObjects API.
      headers['Content-MD5'] = toMd5(payload)

      let removeObjectsResult
      this.makeRequest({ method, bucketName, query, headers }, payload, [200], '', true, (e, response) => {
        if (e) {
          return batchCb(e)
        }
        pipesetup(response, transformers.removeObjectsTransformer())
          .on('data', (data) => {
            removeObjectsResult = data
          })
          .on('error', (e) => {
            return batchCb(e, null)
          })
          .on('end', () => {
            batchResults.push(removeObjectsResult)
            return batchCb(null, removeObjectsResult)
          })
      })
    },
    () => {
      // Flatten the per-batch results into a single list for the caller.
      cb(null, _.flatten(batchResults))
    },
  )
}

// Get the policy on a bucket or an object prefix.
//
// __Arguments__
// * `bucketName` _string_: name of the bucket
// * `callback(err, policy)` _function_: callback function
getBucketPolicy(bucketName, cb) {
  // Validate arguments.
  if (!isValidBucketName(bucketName)) {
    throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
  }
  if (!isFunction(cb)) {
    throw new TypeError('callback should be of type "function"')
  }

  let method = 'GET'
  let query = 'policy'
  this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => {
    if (e) {
      return cb(e)
    }

    // Concatenate the response body and hand the policy back as a string.
    let policy = Buffer.from('')
    pipesetup(response, transformers.getConcater())
      .on('data', (data) => (policy = data))
      .on('error', cb)
      .on('end', () => {
        cb(null, policy.toString())
      })
  })
}

// Set the policy on a bucket or an object prefix.
//
// __Arguments__
// * `bucketName` _string_: name of the bucket
// * `bucketPolicy` _string_: bucket policy (JSON stringify'ed)
// * `callback(err)` _function_: callback function
setBucketPolicy(bucketName, policy, cb) {
  // Validate arguments.
  if (!isValidBucketName(bucketName)) {
    throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
  }
  if (!isString(policy)) {
    throw new errors.InvalidBucketPolicyError(`Invalid bucket policy: ${policy} - must be "string"`)
  }
  if (!isFunction(cb)) {
    throw new TypeError('callback should be of type "function"')
  }

  // An empty policy string removes the bucket policy (DELETE); a
  // non-empty policy replaces it (PUT).
  let method = 'DELETE'
  let query = 'policy'

  if (policy) {
    method = 'PUT'
  }

  this.makeRequest({ method, bucketName, query }, policy, [204], '', false, cb)
}

// Generate a generic presigned URL which can be
// used for HTTP methods GET, PUT, HEAD and DELETE
//
// __Arguments__
// * `method` _string_: name of the HTTP method
// * `bucketName` _string_: name of the bucket
// * `objectName` _string_: name of the object
// * `expiry` _number_: expiry in seconds (optional, default 7 days)
// * `reqParams` _object_: request parameters (optional) e.g {versionId:"10fa9946-3f64-4137-a58f-888065c0732e"}
// * `requestDate` _Date_: A date object, the url will be issued at (optional)
presignedUrl(method, bucketName, objectName, expires, reqParams, requestDate, cb) {
  if (this.anonymous) {
    throw new errors.AnonymousRequestError('Presigned ' + method + ' url cannot be generated for anonymous requests')
  }
  // Backward compatibility: requestDate, reqParams and expires are all
  // optional; each shift fills the skipped arguments with defaults.
  if (isFunction(requestDate)) {
    cb = requestDate
    requestDate = new Date()
  }
  if (isFunction(reqParams)) {
    cb = reqParams
    reqParams = {}
    requestDate = new Date()
  }
  if (isFunction(expires)) {
    cb = expires
    reqParams = {}
    expires = 24 * 60 * 60 * 7 // 7 days in seconds
    requestDate = new Date()
  }
  if (!isNumber(expires)) {
    throw new TypeError('expires should be of type "number"')
  }
  if (!isObject(reqParams)) {
    throw new TypeError('reqParams should be of type "object"')
  }
  if (!isValidDate(requestDate)) {
    throw new TypeError('requestDate should be of type "Date" and valid')
  }
  if (!isFunction(cb)) {
    throw new TypeError('callback should be of type "function"')
  }
  var query = querystring.stringify(reqParams)
  this.getBucketRegion(bucketName, (e, region) => {
    if (e) {
      return cb(e)
    }
    // This statement is added to ensure that we send error through
    // callback on presign failure.
    var url
    var reqOptions = this.getRequestOptions({ method, region, bucketName, objectName, query })

    this.checkAndRefreshCreds()
    try {
      url = presignSignatureV4(
        reqOptions,
        this.accessKey,
        this.secretKey,
        this.sessionToken,
        region,
        requestDate,
        expires,
      )
    } catch (pe) {
      return cb(pe)
    }
    cb(null, url)
  })
}

// Generate a presigned URL for GET
//
// __Arguments__
// * `bucketName` _string_: name of the bucket
// * `objectName` _string_: name of the object
// * `expiry` _number_: expiry in seconds (optional, default 7 days)
// * `respHeaders` _object_: response headers to override or request params for query (optional) e.g {versionId:"10fa9946-3f64-4137-a58f-888065c0732e"}
// * `requestDate` _Date_: A date object, the url will be issued at (optional)
presignedGetObject(bucketName, objectName, expires, respHeaders, requestDate, cb) {
  if (!isValidBucketName(bucketName)) {
    throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
  }
  if (!isValidObjectName(objectName)) {
    throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
  }

  // Backward compatibility: respHeaders may be omitted.
  if (isFunction(respHeaders)) {
    cb = respHeaders
    respHeaders = {}
    requestDate = new Date()
  }

  // Only these response-override params are accepted, and each must be
  // a string when supplied.
  var validRespHeaders = [
    'response-content-type',
    'response-content-language',
    'response-expires',
    'response-cache-control',
    'response-content-disposition',
    'response-content-encoding',
  ]
  validRespHeaders.forEach((header) => {
    if (respHeaders !== undefined && respHeaders[header] !== undefined && !isString(respHeaders[header])) {
      throw new TypeError(`response header ${header} should be of type "string"`)
    }
  })
  return this.presignedUrl('GET', bucketName, objectName, expires, respHeaders, requestDate, cb)
}

// Generate a presigned URL for PUT. Using this URL, the browser can upload to S3 only with the specified object name.
//
// __Arguments__
// * `bucketName` _string_: name of the bucket
// * `objectName` _string_: name of the object
// * `expiry` _number_: expiry in seconds (optional, default 7 days)
presignedPutObject(bucketName, objectName, expires, cb) {
  if (!isValidBucketName(bucketName)) {
    throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
  }
  if (!isValidObjectName(objectName)) {
    throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
  }
  return this.presignedUrl('PUT', bucketName, objectName, expires, cb)
}

// return PostPolicy object
newPostPolicy() {
  return new PostPolicy()
}

// presignedPostPolicy can be used in situations where we want more control on the upload than what
// presignedPutObject() provides. i.e Using presignedPostPolicy we will be able to put policy restrictions
// on the object's `name` `bucket` `expiry` `Content-Type` `Content-Disposition` `metaData`
presignedPostPolicy(postPolicy, cb) {
  if (this.anonymous) {
    throw new errors.AnonymousRequestError('Presigned POST policy cannot be generated for anonymous requests')
  }
  if (!isObject(postPolicy)) {
    throw new TypeError('postPolicy should be of type "object"')
  }
  if (!isFunction(cb)) {
    throw new TypeError('cb should be of type "function"')
  }
  this.getBucketRegion(postPolicy.formData.bucket, (e, region) => {
    if (e) {
      return cb(e)
    }
    var date = new Date()
    var dateStr = makeDateLong(date)

    this.checkAndRefreshCreds()

    if (!postPolicy.policy.expiration) {
      // 'expiration' is mandatory field for S3.
      // Set default expiration date of 7 days.
- var expires = new Date() - expires.setSeconds(24 * 60 * 60 * 7) - postPolicy.setExpires(expires) - } - - postPolicy.policy.conditions.push(['eq', '$x-amz-date', dateStr]) - postPolicy.formData['x-amz-date'] = dateStr - - postPolicy.policy.conditions.push(['eq', '$x-amz-algorithm', 'AWS4-HMAC-SHA256']) - postPolicy.formData['x-amz-algorithm'] = 'AWS4-HMAC-SHA256' - - postPolicy.policy.conditions.push(['eq', '$x-amz-credential', this.accessKey + '/' + getScope(region, date)]) - postPolicy.formData['x-amz-credential'] = this.accessKey + '/' + getScope(region, date) - - if (this.sessionToken) { - postPolicy.policy.conditions.push(['eq', '$x-amz-security-token', this.sessionToken]) - postPolicy.formData['x-amz-security-token'] = this.sessionToken - } - - var policyBase64 = Buffer.from(JSON.stringify(postPolicy.policy)).toString('base64') - - postPolicy.formData.policy = policyBase64 - - var signature = postPresignSignatureV4(region, date, this.secretKey, policyBase64) - - postPolicy.formData['x-amz-signature'] = signature - var opts = {} - opts.region = region - opts.bucketName = postPolicy.formData.bucket - var reqOptions = this.getRequestOptions(opts) - var portStr = this.port == 80 || this.port === 443 ? '' : `:${this.port.toString()}` - var urlStr = `${reqOptions.protocol}//${reqOptions.host}${portStr}${reqOptions.path}` - cb(null, { postURL: urlStr, formData: postPolicy.formData }) - }) - } - - // Calls implemented below are related to multipart. - - // Initiate a new multipart upload. 
- initiateNewMultipartUpload(bucketName, objectName, metaData, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isObject(metaData)) { - throw new errors.InvalidObjectNameError('contentType should be of type "object"') - } - var method = 'POST' - let headers = Object.assign({}, metaData) - var query = 'uploads' - this.makeRequest({ method, bucketName, objectName, query, headers }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - var transformer = transformers.getInitiateMultipartTransformer() - pipesetup(response, transformer) - .on('error', (e) => cb(e)) - .on('data', (uploadId) => cb(null, uploadId)) - }) - } - - // Complete the multipart upload. After all the parts are uploaded issuing - // this call will aggregate the parts on the server into a single object. 
- completeMultipartUpload(bucketName, objectName, uploadId, etags, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isString(uploadId)) { - throw new TypeError('uploadId should be of type "string"') - } - if (!isObject(etags)) { - throw new TypeError('etags should be of type "Array"') - } - if (!isFunction(cb)) { - throw new TypeError('cb should be of type "function"') - } - - if (!uploadId) { - throw new errors.InvalidArgumentError('uploadId cannot be empty') - } - - var method = 'POST' - var query = `uploadId=${uriEscape(uploadId)}` - - var parts = [] - - etags.forEach((element) => { - parts.push({ - Part: [ - { - PartNumber: element.part, - }, - { - ETag: element.etag, - }, - ], - }) - }) - - var payloadObject = { CompleteMultipartUpload: parts } - var payload = Xml(payloadObject) - - this.makeRequest({ method, bucketName, objectName, query }, payload, [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - var transformer = transformers.getCompleteMultipartTransformer() - pipesetup(response, transformer) - .on('error', (e) => cb(e)) - .on('data', (result) => { - if (result.errCode) { - // Multipart Complete API returns an error XML after a 200 http status - cb(new errors.S3Error(result.errMessage)) - } else { - const completeMultipartResult = { - etag: result.etag, - versionId: getVersionId(response.headers), - } - cb(null, completeMultipartResult) - } - }) - }) - } - - // Get part-info of all parts of an incomplete upload specified by uploadId. 
- listParts(bucketName, objectName, uploadId, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isString(uploadId)) { - throw new TypeError('uploadId should be of type "string"') - } - if (!uploadId) { - throw new errors.InvalidArgumentError('uploadId cannot be empty') - } - var parts = [] - var listNext = (marker) => { - this.listPartsQuery(bucketName, objectName, uploadId, marker, (e, result) => { - if (e) { - cb(e) - return - } - parts = parts.concat(result.parts) - if (result.isTruncated) { - listNext(result.marker) - return - } - cb(null, parts) - }) - } - listNext(0) - } - - // Called by listParts to fetch a batch of part-info - listPartsQuery(bucketName, objectName, uploadId, marker, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isString(uploadId)) { - throw new TypeError('uploadId should be of type "string"') - } - if (!isNumber(marker)) { - throw new TypeError('marker should be of type "number"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - if (!uploadId) { - throw new errors.InvalidArgumentError('uploadId cannot be empty') - } - var query = '' - if (marker && marker !== 0) { - query += `part-number-marker=${marker}&` - } - query += `uploadId=${uriEscape(uploadId)}` - - var method = 'GET' - this.makeRequest({ method, bucketName, objectName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - var transformer = transformers.getListPartsTransformer() - pipesetup(response, transformer) - .on('error', (e) => cb(e)) - .on('data', (data) => cb(null, data)) - }) - } 
- - // Called by listIncompleteUploads to fetch a batch of incomplete uploads. - listIncompleteUploadsQuery(bucketName, prefix, keyMarker, uploadIdMarker, delimiter) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isString(prefix)) { - throw new TypeError('prefix should be of type "string"') - } - if (!isString(keyMarker)) { - throw new TypeError('keyMarker should be of type "string"') - } - if (!isString(uploadIdMarker)) { - throw new TypeError('uploadIdMarker should be of type "string"') - } - if (!isString(delimiter)) { - throw new TypeError('delimiter should be of type "string"') - } - var queries = [] - queries.push(`prefix=${uriEscape(prefix)}`) - queries.push(`delimiter=${uriEscape(delimiter)}`) - - if (keyMarker) { - keyMarker = uriEscape(keyMarker) - queries.push(`key-marker=${keyMarker}`) - } - if (uploadIdMarker) { - queries.push(`upload-id-marker=${uploadIdMarker}`) - } - - var maxUploads = 1000 - queries.push(`max-uploads=${maxUploads}`) - queries.sort() - queries.unshift('uploads') - var query = '' - if (queries.length > 0) { - query = `${queries.join('&')}` - } - var method = 'GET' - var transformer = transformers.getListMultipartTransformer() - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return transformer.emit('error', e) - } - pipesetup(response, transformer) - }) - return transformer - } - - // Find uploadId of an incomplete upload. 
- findUploadId(bucketName, objectName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isFunction(cb)) { - throw new TypeError('cb should be of type "function"') - } - var latestUpload - var listNext = (keyMarker, uploadIdMarker) => { - this.listIncompleteUploadsQuery(bucketName, objectName, keyMarker, uploadIdMarker, '') - .on('error', (e) => cb(e)) - .on('data', (result) => { - result.uploads.forEach((upload) => { - if (upload.key === objectName) { - if (!latestUpload || upload.initiated.getTime() > latestUpload.initiated.getTime()) { - latestUpload = upload - return - } - } - }) - if (result.isTruncated) { - listNext(result.nextKeyMarker, result.nextUploadIdMarker) - return - } - if (latestUpload) { - return cb(null, latestUpload.uploadId) - } - cb(null, undefined) - }) - } - listNext('', '') - } - - // Returns a function that can be used for uploading objects. - // If multipart === true, it returns function that is used to upload - // a part of the multipart. 
- getUploader(bucketName, objectName, metaData, multipart) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isBoolean(multipart)) { - throw new TypeError('multipart should be of type "boolean"') - } - if (!isObject(metaData)) { - throw new TypeError('metadata should be of type "object"') - } - - var validate = (stream, length, sha256sum, md5sum, cb) => { - if (!isReadableStream(stream)) { - throw new TypeError('stream should be of type "Stream"') - } - if (!isNumber(length)) { - throw new TypeError('length should be of type "number"') - } - if (!isString(sha256sum)) { - throw new TypeError('sha256sum should be of type "string"') - } - if (!isString(md5sum)) { - throw new TypeError('md5sum should be of type "string"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - } - var simpleUploader = (...args) => { - validate(...args) - var query = '' - upload(query, ...args) - } - var multipartUploader = (uploadId, partNumber, ...rest) => { - if (!isString(uploadId)) { - throw new TypeError('uploadId should be of type "string"') - } - if (!isNumber(partNumber)) { - throw new TypeError('partNumber should be of type "number"') - } - if (!uploadId) { - throw new errors.InvalidArgumentError('Empty uploadId') - } - if (!partNumber) { - throw new errors.InvalidArgumentError('partNumber cannot be 0') - } - validate(...rest) - var query = `partNumber=${partNumber}&uploadId=${uriEscape(uploadId)}` - upload(query, ...rest) - } - var upload = (query, stream, length, sha256sum, md5sum, cb) => { - var method = 'PUT' - let headers = { 'Content-Length': length } - - if (!multipart) { - headers = Object.assign({}, metaData, headers) - } - - if (!this.enableSHA256) { - headers['Content-MD5'] = md5sum - } - this.makeRequestStream( - { 
method, bucketName, objectName, query, headers }, - stream, - sha256sum, - [200], - '', - true, - (e, response) => { - if (e) { - return cb(e) - } - const result = { - etag: sanitizeETag(response.headers.etag), - versionId: getVersionId(response.headers), - } - // Ignore the 'data' event so that the stream closes. (nodejs stream requirement) - response.on('data', () => {}) - cb(null, result) - }, - ) - } - if (multipart) { - return multipartUploader - } - return simpleUploader - } - - // Remove all the notification configurations in the S3 provider - setBucketNotification(bucketName, config, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isObject(config)) { - throw new TypeError('notification config should be of type "Object"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - var method = 'PUT' - var query = 'notification' - var builder = new xml2js.Builder({ - rootName: 'NotificationConfiguration', - renderOpts: { pretty: false }, - headless: true, - }) - var payload = builder.buildObject(config) - this.makeRequest({ method, bucketName, query }, payload, [200], '', false, cb) - } - - removeAllBucketNotification(bucketName, cb) { - this.setBucketNotification(bucketName, new NotificationConfig(), cb) - } - - // Return the list of notification configurations stored - // in the S3 provider - getBucketNotification(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - var method = 'GET' - var query = 'notification' - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - var transformer = transformers.getBucketNotificationTransformer() - var bucketNotification - pipesetup(response, 
transformer) - .on('data', (result) => (bucketNotification = result)) - .on('error', (e) => cb(e)) - .on('end', () => cb(null, bucketNotification)) - }) - } - - // Listens for bucket notifications. Returns an EventEmitter. - listenBucketNotification(bucketName, prefix, suffix, events) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) - } - if (!isString(prefix)) { - throw new TypeError('prefix must be of type string') - } - if (!isString(suffix)) { - throw new TypeError('suffix must be of type string') - } - if (!isArray(events)) { - throw new TypeError('events must be of type Array') - } - let listener = new NotificationPoller(this, bucketName, prefix, suffix, events) - listener.start() - - return listener - } - - getBucketVersioning(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - var method = 'GET' - var query = 'versioning' - - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let versionConfig = Buffer.from('') - pipesetup(response, transformers.bucketVersioningTransformer()) - .on('data', (data) => { - versionConfig = data - }) - .on('error', cb) - .on('end', () => { - cb(null, versionConfig) - }) - }) - } - - setBucketVersioning(bucketName, versionConfig, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!Object.keys(versionConfig).length) { - throw new errors.InvalidArgumentError('versionConfig should be of type "object"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - var method = 'PUT' - var query = 'versioning' - var builder = new xml2js.Builder({ - rootName: 
'VersioningConfiguration', - renderOpts: { pretty: false }, - headless: true, - }) - var payload = builder.buildObject(versionConfig) - - this.makeRequest({ method, bucketName, query }, payload, [200], '', false, cb) - } - - /** To set Tags on a bucket or object based on the params - * __Arguments__ - * taggingParams _object_ Which contains the following properties - * bucketName _string_, - * objectName _string_ (Optional), - * tags _object_ of the form {'':'','':''} - * putOpts _object_ (Optional) e.g {versionId:"my-object-version-id"}, - * cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. - */ - setTagging(taggingParams) { - const { bucketName, objectName, tags, putOpts = {}, cb } = taggingParams - const method = 'PUT' - let query = 'tagging' - - if (putOpts && putOpts.versionId) { - query = `${query}&versionId=${putOpts.versionId}` - } - const tagsList = [] - for (const [key, value] of Object.entries(tags)) { - tagsList.push({ Key: key, Value: value }) - } - const taggingConfig = { - Tagging: { - TagSet: { - Tag: tagsList, - }, - }, - } - const encoder = new TextEncoder() - const headers = {} - const builder = new xml2js.Builder({ headless: true, renderOpts: { pretty: false } }) - let payload = builder.buildObject(taggingConfig) - payload = encoder.encode(payload) - headers['Content-MD5'] = toMd5(payload) - const requestOptions = { method, bucketName, query, headers } - - if (objectName) { - requestOptions['objectName'] = objectName - } - headers['Content-MD5'] = toMd5(payload) - - this.makeRequest(requestOptions, payload, [200], '', false, cb) - } - - /** Set Tags on a Bucket - * __Arguments__ - * bucketName _string_ - * tags _object_ of the form {'':'','':''} - * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. 
- */ - setBucketTagging(bucketName, tags, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isObject(tags)) { - throw new errors.InvalidArgumentError('tags should be of type "object"') - } - if (Object.keys(tags).length > 10) { - throw new errors.InvalidArgumentError('maximum tags allowed is 10"') - } - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - - return this.setTagging({ bucketName, tags, cb }) - } - - /** Set Tags on an Object - * __Arguments__ - * bucketName _string_ - * objectName _string_ - * * tags _object_ of the form {'':'','':''} - * putOpts _object_ (Optional) e.g {versionId:"my-object-version-id"}, - * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. - */ - setObjectTagging(bucketName, objectName, tags, putOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidBucketNameError('Invalid object name: ' + objectName) - } - - if (isFunction(putOpts)) { - cb = putOpts - putOpts = {} - } - - if (!isObject(tags)) { - throw new errors.InvalidArgumentError('tags should be of type "object"') - } - if (Object.keys(tags).length > 10) { - throw new errors.InvalidArgumentError('Maximum tags allowed is 10"') - } - - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - return this.setTagging({ bucketName, objectName, tags, putOpts, cb }) - } - - /** Remove Tags on an Bucket/Object based on params - * __Arguments__ - * bucketName _string_ - * objectName _string_ (optional) - * removeOpts _object_ (Optional) e.g {versionId:"my-object-version-id"}, - * `cb(error)` _function_ - callback function with `err` as the error argument. 
`err` is null if the operation is successful. - */ - removeTagging({ bucketName, objectName, removeOpts, cb }) { - const method = 'DELETE' - let query = 'tagging' - - if (removeOpts && Object.keys(removeOpts).length && removeOpts.versionId) { - query = `${query}&versionId=${removeOpts.versionId}` - } - const requestOptions = { method, bucketName, objectName, query } - - if (objectName) { - requestOptions['objectName'] = objectName - } - this.makeRequest(requestOptions, '', [200, 204], '', true, cb) - } - - /** Remove Tags associated with a bucket - * __Arguments__ - * bucketName _string_ - * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. - */ - removeBucketTagging(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - return this.removeTagging({ bucketName, cb }) - } - - /** Remove tags associated with an object - * __Arguments__ - * bucketName _string_ - * objectName _string_ - * removeOpts _object_ (Optional) e.g. {VersionID:"my-object-version-id"} - * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. 
- */ - removeObjectTagging(bucketName, objectName, removeOpts, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidBucketNameError('Invalid object name: ' + objectName) - } - if (isFunction(removeOpts)) { - cb = removeOpts - removeOpts = {} - } - if (removeOpts && Object.keys(removeOpts).length && !isObject(removeOpts)) { - throw new errors.InvalidArgumentError('removeOpts should be of type "object"') - } - - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - return this.removeTagging({ bucketName, objectName, removeOpts, cb }) - } - - /** Get Tags associated with a Bucket - * __Arguments__ - * bucketName _string_ - * `cb(error, tags)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. - */ - getBucketTagging(bucketName, cb) { - const method = 'GET' - const query = 'tagging' - const requestOptions = { method, bucketName, query } - - this.makeRequest(requestOptions, '', [200], '', true, (e, response) => { - var transformer = transformers.getTagsTransformer() - if (e) { - return cb(e) - } - let tagsList - pipesetup(response, transformer) - .on('data', (result) => (tagsList = result)) - .on('error', (e) => cb(e)) - .on('end', () => cb(null, tagsList)) - }) - } - - /** Get the tags associated with a bucket OR an object - * bucketName _string_ - * objectName _string_ (Optional) - * getOpts _object_ (Optional) e.g {versionId:"my-object-version-id"} - * `cb(error, tags)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. 
- */ - getObjectTagging(bucketName, objectName, getOpts = {}, cb = () => false) { - const method = 'GET' - let query = 'tagging' - - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidBucketNameError('Invalid object name: ' + objectName) - } - if (isFunction(getOpts)) { - cb = getOpts - getOpts = {} - } - if (!isObject(getOpts)) { - throw new errors.InvalidArgumentError('getOpts should be of type "object"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - if (getOpts && getOpts.versionId) { - query = `${query}&versionId=${getOpts.versionId}` - } - const requestOptions = { method, bucketName, query } - if (objectName) { - requestOptions['objectName'] = objectName - } - - this.makeRequest(requestOptions, '', [200], '', true, (e, response) => { - const transformer = transformers.getTagsTransformer() - if (e) { - return cb(e) - } - let tagsList - pipesetup(response, transformer) - .on('data', (result) => (tagsList = result)) - .on('error', (e) => cb(e)) - .on('end', () => cb(null, tagsList)) - }) - } - - /** Put lifecycle configuration on a bucket. - /** Apply lifecycle configuration on a bucket. - * bucketName _string_ - * policyConfig _object_ a valid policy configuration object. - * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. 
- */ - applyBucketLifecycle(bucketName, policyConfig, cb) { - const method = 'PUT' - const query = 'lifecycle' - - const encoder = new TextEncoder() - const headers = {} - const builder = new xml2js.Builder({ - rootName: 'LifecycleConfiguration', - headless: true, - renderOpts: { pretty: false }, - }) - let payload = builder.buildObject(policyConfig) - payload = encoder.encode(payload) - const requestOptions = { method, bucketName, query, headers } - headers['Content-MD5'] = toMd5(payload) - - this.makeRequest(requestOptions, payload, [200], '', false, cb) - } - - /** Remove lifecycle configuration of a bucket. - * bucketName _string_ - * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. - */ - removeBucketLifecycle(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - const method = 'DELETE' - const query = 'lifecycle' - this.makeRequest({ method, bucketName, query }, '', [204], '', false, cb) - } - - /** Set/Override lifecycle configuration on a bucket. if the configuration is empty, it removes the configuration. - * bucketName _string_ - * lifeCycleConfig _object_ one of the following values: (null or '') to remove the lifecycle configuration. or a valid lifecycle configuration - * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. - */ - setBucketLifecycle(bucketName, lifeCycleConfig = null, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (_.isEmpty(lifeCycleConfig)) { - this.removeBucketLifecycle(bucketName, cb) - } else { - this.applyBucketLifecycle(bucketName, lifeCycleConfig, cb) - } - } - - /** Get lifecycle configuration on a bucket. 
- * bucketName _string_ - * `cb(config)` _function_ - callback function with lifecycle configuration as the error argument. - */ - getBucketLifecycle(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - const method = 'GET' - const query = 'lifecycle' - const requestOptions = { method, bucketName, query } - - this.makeRequest(requestOptions, '', [200], '', true, (e, response) => { - const transformer = transformers.lifecycleTransformer() - if (e) { - return cb(e) - } - let lifecycleConfig - pipesetup(response, transformer) - .on('data', (result) => (lifecycleConfig = result)) - .on('error', (e) => cb(e)) - .on('end', () => cb(null, lifecycleConfig)) - }) - } - - setObjectLockConfig(bucketName, lockConfigOpts = {}, cb) { - const retentionModes = [RETENTION_MODES.COMPLIANCE, RETENTION_MODES.GOVERNANCE] - const validUnits = [RETENTION_VALIDITY_UNITS.DAYS, RETENTION_VALIDITY_UNITS.YEARS] - - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - - if (lockConfigOpts.mode && !retentionModes.includes(lockConfigOpts.mode)) { - throw new TypeError(`lockConfigOpts.mode should be one of ${retentionModes}`) - } - if (lockConfigOpts.unit && !validUnits.includes(lockConfigOpts.unit)) { - throw new TypeError(`lockConfigOpts.unit should be one of ${validUnits}`) - } - if (lockConfigOpts.validity && !isNumber(lockConfigOpts.validity)) { - throw new TypeError(`lockConfigOpts.validity should be a number`) - } - - const method = 'PUT' - const query = 'object-lock' - - let config = { - ObjectLockEnabled: 'Enabled', - } - const configKeys = Object.keys(lockConfigOpts) - // Check if keys are present and all keys are present. 
- if (configKeys.length > 0) { - if (_.difference(configKeys, ['unit', 'mode', 'validity']).length !== 0) { - throw new TypeError( - `lockConfigOpts.mode,lockConfigOpts.unit,lockConfigOpts.validity all the properties should be specified.`, - ) - } else { - config.Rule = { - DefaultRetention: {}, - } - if (lockConfigOpts.mode) { - config.Rule.DefaultRetention.Mode = lockConfigOpts.mode - } - if (lockConfigOpts.unit === RETENTION_VALIDITY_UNITS.DAYS) { - config.Rule.DefaultRetention.Days = lockConfigOpts.validity - } else if (lockConfigOpts.unit === RETENTION_VALIDITY_UNITS.YEARS) { - config.Rule.DefaultRetention.Years = lockConfigOpts.validity - } - } - } - - const builder = new xml2js.Builder({ - rootName: 'ObjectLockConfiguration', - renderOpts: { pretty: false }, - headless: true, - }) - const payload = builder.buildObject(config) - - const headers = {} - headers['Content-MD5'] = toMd5(payload) - - this.makeRequest({ method, bucketName, query, headers }, payload, [200], '', false, cb) - } - - getObjectLockConfig(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - const method = 'GET' - const query = 'object-lock' - - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let objectLockConfig = Buffer.from('') - pipesetup(response, transformers.objectLockTransformer()) - .on('data', (data) => { - objectLockConfig = data - }) - .on('error', cb) - .on('end', () => { - cb(null, objectLockConfig) - }) - }) - } - - putObjectRetention(bucketName, objectName, retentionOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object 
name: ${objectName}`) - } - if (!isObject(retentionOpts)) { - throw new errors.InvalidArgumentError('retentionOpts should be of type "object"') - } else { - if (retentionOpts.governanceBypass && !isBoolean(retentionOpts.governanceBypass)) { - throw new errors.InvalidArgumentError('Invalid value for governanceBypass', retentionOpts.governanceBypass) - } - if ( - retentionOpts.mode && - ![RETENTION_MODES.COMPLIANCE, RETENTION_MODES.GOVERNANCE].includes(retentionOpts.mode) - ) { - throw new errors.InvalidArgumentError('Invalid object retention mode ', retentionOpts.mode) - } - if (retentionOpts.retainUntilDate && !isString(retentionOpts.retainUntilDate)) { - throw new errors.InvalidArgumentError('Invalid value for retainUntilDate', retentionOpts.retainUntilDate) - } - if (retentionOpts.versionId && !isString(retentionOpts.versionId)) { - throw new errors.InvalidArgumentError('Invalid value for versionId', retentionOpts.versionId) - } - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - const method = 'PUT' - let query = 'retention' - - const headers = {} - if (retentionOpts.governanceBypass) { - headers['X-Amz-Bypass-Governance-Retention'] = true - } - - const builder = new xml2js.Builder({ rootName: 'Retention', renderOpts: { pretty: false }, headless: true }) - const params = {} - - if (retentionOpts.mode) { - params.Mode = retentionOpts.mode - } - if (retentionOpts.retainUntilDate) { - params.RetainUntilDate = retentionOpts.retainUntilDate - } - if (retentionOpts.versionId) { - query += `&versionId=${retentionOpts.versionId}` - } - - let payload = builder.buildObject(params) - - headers['Content-MD5'] = toMd5(payload) - this.makeRequest({ method, bucketName, objectName, query, headers }, payload, [200, 204], '', false, cb) - } - - getObjectRetention(bucketName, objectName, getOpts, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if 
(!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isObject(getOpts)) { - throw new errors.InvalidArgumentError('callback should be of type "object"') - } else if (getOpts.versionId && !isString(getOpts.versionId)) { - throw new errors.InvalidArgumentError('VersionID should be of type "string"') - } - if (cb && !isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - const method = 'GET' - let query = 'retention' - if (getOpts.versionId) { - query += `&versionId=${getOpts.versionId}` - } - - this.makeRequest({ method, bucketName, objectName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let retentionConfig = Buffer.from('') - pipesetup(response, transformers.objectRetentionTransformer()) - .on('data', (data) => { - retentionConfig = data - }) - .on('error', cb) - .on('end', () => { - cb(null, retentionConfig) - }) - }) - } - - setBucketEncryption(bucketName, encryptionConfig, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - - if (isFunction(encryptionConfig)) { - cb = encryptionConfig - encryptionConfig = null - } - - if (!_.isEmpty(encryptionConfig) && encryptionConfig.Rule.length > 1) { - throw new errors.InvalidArgumentError('Invalid Rule length. 
Only one rule is allowed.: ' + encryptionConfig.Rule) - } - if (cb && !isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - let encryptionObj = encryptionConfig - if (_.isEmpty(encryptionConfig)) { - encryptionObj = { - // Default MinIO Server Supported Rule - Rule: [ - { - ApplyServerSideEncryptionByDefault: { - SSEAlgorithm: 'AES256', - }, - }, - ], - } - } - - let method = 'PUT' - let query = 'encryption' - let builder = new xml2js.Builder({ - rootName: 'ServerSideEncryptionConfiguration', - renderOpts: { pretty: false }, - headless: true, - }) - let payload = builder.buildObject(encryptionObj) - - const headers = {} - headers['Content-MD5'] = toMd5(payload) - - this.makeRequest({ method, bucketName, query, headers }, payload, [200], '', false, cb) - } - - getBucketEncryption(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - const method = 'GET' - const query = 'encryption' - - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let bucketEncConfig = Buffer.from('') - pipesetup(response, transformers.bucketEncryptionTransformer()) - .on('data', (data) => { - bucketEncConfig = data - }) - .on('error', cb) - .on('end', () => { - cb(null, bucketEncConfig) - }) - }) - } - removeBucketEncryption(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - const method = 'DELETE' - const query = 'encryption' - - this.makeRequest({ method, bucketName, query }, '', [204], '', false, cb) - } - - setBucketReplication(bucketName, replicationConfig = {}, cb) { - if 
(!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isObject(replicationConfig)) { - throw new errors.InvalidArgumentError('replicationConfig should be of type "object"') - } else { - if (_.isEmpty(replicationConfig.role)) { - throw new errors.InvalidArgumentError('Role cannot be empty') - } else if (replicationConfig.role && !isString(replicationConfig.role)) { - throw new errors.InvalidArgumentError('Invalid value for role', replicationConfig.role) - } - if (_.isEmpty(replicationConfig.rules)) { - throw new errors.InvalidArgumentError('Minimum one replication rule must be specified') - } - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - const method = 'PUT' - let query = 'replication' - const headers = {} - - const replicationParamsConfig = { - ReplicationConfiguration: { - Role: replicationConfig.role, - Rule: replicationConfig.rules, - }, - } - - const builder = new xml2js.Builder({ renderOpts: { pretty: false }, headless: true }) - - let payload = builder.buildObject(replicationParamsConfig) - - headers['Content-MD5'] = toMd5(payload) - - this.makeRequest({ method, bucketName, query, headers }, payload, [200], '', false, cb) - } - - getBucketReplication(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - const method = 'GET' - const query = 'replication' - - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let replicationConfig = Buffer.from('') - pipesetup(response, transformers.replicationConfigTransformer()) - .on('data', (data) => { - replicationConfig = data - }) - .on('error', cb) - .on('end', () => { - cb(null, replicationConfig) - }) - }) - } - - 
removeBucketReplication(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - const method = 'DELETE' - const query = 'replication' - this.makeRequest({ method, bucketName, query }, '', [200, 204], '', false, cb) - } - - getObjectLegalHold(bucketName, objectName, getOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - - if (isFunction(getOpts)) { - cb = getOpts - getOpts = {} - } - - if (!isObject(getOpts)) { - throw new TypeError('getOpts should be of type "Object"') - } else if (Object.keys(getOpts).length > 0 && getOpts.versionId && !isString(getOpts.versionId)) { - throw new TypeError('versionId should be of type string.:', getOpts.versionId) - } - - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - - const method = 'GET' - let query = 'legal-hold' - - if (getOpts.versionId) { - query += `&versionId=${getOpts.versionId}` - } - - this.makeRequest({ method, bucketName, objectName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let legalHoldConfig = Buffer.from('') - pipesetup(response, transformers.objectLegalHoldTransformer()) - .on('data', (data) => { - legalHoldConfig = data - }) - .on('error', cb) - .on('end', () => { - cb(null, legalHoldConfig) - }) - }) - } - - setObjectLegalHold(bucketName, objectName, setOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - - const defaultOpts = { - status: LEGAL_HOLD_STATUS.ENABLED, - } - if (isFunction(setOpts)) { - cb 
= setOpts - setOpts = defaultOpts - } - - if (!isObject(setOpts)) { - throw new TypeError('setOpts should be of type "Object"') - } else { - if (![LEGAL_HOLD_STATUS.ENABLED, LEGAL_HOLD_STATUS.DISABLED].includes(setOpts.status)) { - throw new TypeError('Invalid status: ' + setOpts.status) - } - if (setOpts.versionId && !setOpts.versionId.length) { - throw new TypeError('versionId should be of type string.:' + setOpts.versionId) - } - } - - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - - if (_.isEmpty(setOpts)) { - setOpts = { - defaultOpts, - } - } - - const method = 'PUT' - let query = 'legal-hold' - - if (setOpts.versionId) { - query += `&versionId=${setOpts.versionId}` - } - - let config = { - Status: setOpts.status, - } - - const builder = new xml2js.Builder({ rootName: 'LegalHold', renderOpts: { pretty: false }, headless: true }) - const payload = builder.buildObject(config) - const headers = {} - headers['Content-MD5'] = toMd5(payload) - - this.makeRequest({ method, bucketName, objectName, query, headers }, payload, [200], '', false, cb) - } - async setCredentialsProvider(credentialsProvider) { - if (!(credentialsProvider instanceof CredentialProvider)) { - throw new Error('Unable to get credentials. Expected instance of CredentialProvider') - } - this.credentialsProvider = credentialsProvider - await this.checkAndRefreshCreds() - } - - async checkAndRefreshCreds() { - if (this.credentialsProvider) { - return await this.fetchCredentials() - } - } - - async fetchCredentials() { - if (this.credentialsProvider) { - const credentialsConf = await this.credentialsProvider.getCredentials() - if (credentialsConf) { - this.accessKey = credentialsConf.getAccessKey() - this.secretKey = credentialsConf.getSecretKey() - this.sessionToken = credentialsConf.getSessionToken() - } else { - throw new Error('Unable to get credentials. 
Expected instance of BaseCredentialsProvider') - } - } else { - throw new Error('Unable to get credentials. Expected instance of BaseCredentialsProvider') - } - } - - /** - * Internal Method to abort a multipart upload request in case of any errors. - * @param bucketName __string__ Bucket Name - * @param objectName __string__ Object Name - * @param uploadId __string__ id of a multipart upload to cancel during compose object sequence. - * @param cb __function__ callback function - */ - abortMultipartUpload(bucketName, objectName, uploadId, cb) { - const method = 'DELETE' - let query = `uploadId=${uploadId}` - - const requestOptions = { method, bucketName, objectName: objectName, query } - this.makeRequest(requestOptions, '', [204], '', false, cb) - } - - /** - * Internal method to upload a part during compose object. - * @param partConfig __object__ contains the following. - * bucketName __string__ - * objectName __string__ - * uploadID __string__ - * partNumber __number__ - * headers __object__ - * @param cb called with null incase of error. - */ - uploadPartCopy(partConfig, cb) { - const { bucketName, objectName, uploadID, partNumber, headers } = partConfig - - const method = 'PUT' - let query = `uploadId=${uploadID}&partNumber=${partNumber}` - const requestOptions = { method, bucketName, objectName: objectName, query, headers } - return this.makeRequest(requestOptions, '', [200], '', true, (e, response) => { - let partCopyResult = Buffer.from('') - if (e) { - return cb(e) - } - pipesetup(response, transformers.uploadPartTransformer()) - .on('data', (data) => { - partCopyResult = data - }) - .on('error', cb) - .on('end', () => { - let uploadPartCopyRes = { - etag: sanitizeETag(partCopyResult.ETag), - key: objectName, - part: partNumber, - } - - cb(null, uploadPartCopyRes) - }) - }) - } - - composeObject(destObjConfig = {}, sourceObjList = [], cb) { - const me = this // many async flows. so store the ref. 
- const sourceFilesLength = sourceObjList.length - - if (!isArray(sourceObjList)) { - throw new errors.InvalidArgumentError('sourceConfig should an array of CopySourceOptions ') - } - if (!(destObjConfig instanceof CopyDestinationOptions)) { - throw new errors.InvalidArgumentError('destConfig should of type CopyDestinationOptions ') - } - - if (sourceFilesLength < 1 || sourceFilesLength > PART_CONSTRAINTS.MAX_PARTS_COUNT) { - throw new errors.InvalidArgumentError( - `"There must be as least one and up to ${PART_CONSTRAINTS.MAX_PARTS_COUNT} source objects.`, - ) - } - - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - for (let i = 0; i < sourceFilesLength; i++) { - if (!sourceObjList[i].validate()) { - return false - } - } - - if (!destObjConfig.validate()) { - return false - } - - const getStatOptions = (srcConfig) => { - let statOpts = {} - if (!_.isEmpty(srcConfig.VersionID)) { - statOpts = { - versionId: srcConfig.VersionID, - } - } - return statOpts - } - const srcObjectSizes = [] - let totalSize = 0 - let totalParts = 0 - - const sourceObjStats = sourceObjList.map((srcItem) => - me.statObject(srcItem.Bucket, srcItem.Object, getStatOptions(srcItem)), - ) - - return Promise.all(sourceObjStats) - .then((srcObjectInfos) => { - const validatedStats = srcObjectInfos.map((resItemStat, index) => { - const srcConfig = sourceObjList[index] - - let srcCopySize = resItemStat.size - // Check if a segment is specified, and if so, is the - // segment within object bounds? 
- if (srcConfig.MatchRange) { - // Since range is specified, - // 0 <= src.srcStart <= src.srcEnd - // so only invalid case to check is: - const srcStart = srcConfig.Start - const srcEnd = srcConfig.End - if (srcEnd >= srcCopySize || srcStart < 0) { - throw new errors.InvalidArgumentError( - `CopySrcOptions ${index} has invalid segment-to-copy [${srcStart}, ${srcEnd}] (size is ${srcCopySize})`, - ) - } - srcCopySize = srcEnd - srcStart + 1 - } - - // Only the last source may be less than `absMinPartSize` - if (srcCopySize < PART_CONSTRAINTS.ABS_MIN_PART_SIZE && index < sourceFilesLength - 1) { - throw new errors.InvalidArgumentError( - `CopySrcOptions ${index} is too small (${srcCopySize}) and it is not the last part.`, - ) - } - - // Is data to copy too large? - totalSize += srcCopySize - if (totalSize > PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE) { - throw new errors.InvalidArgumentError(`Cannot compose an object of size ${totalSize} (> 5TiB)`) - } - - // record source size - srcObjectSizes[index] = srcCopySize - - // calculate parts needed for current source - totalParts += partsRequired(srcCopySize) - // Do we need more parts than we are allowed? - if (totalParts > PART_CONSTRAINTS.MAX_PARTS_COUNT) { - throw new errors.InvalidArgumentError( - `Your proposed compose object requires more than ${PART_CONSTRAINTS.MAX_PARTS_COUNT} parts`, - ) - } - - return resItemStat - }) - - if ((totalParts === 1 && totalSize <= PART_CONSTRAINTS.MAX_PART_SIZE) || totalSize === 0) { - return this.copyObject(sourceObjList[0], destObjConfig, cb) // use copyObjectV2 - } - - // preserve etag to avoid modification of object while copying. 
- for (let i = 0; i < sourceFilesLength; i++) { - sourceObjList[i].MatchETag = validatedStats[i].etag - } - - const splitPartSizeList = validatedStats.map((resItemStat, idx) => { - const calSize = calculateEvenSplits(srcObjectSizes[idx], sourceObjList[idx]) - return calSize - }) - - function getUploadPartConfigList(uploadId) { - const uploadPartConfigList = [] - - splitPartSizeList.forEach((splitSize, splitIndex) => { - const { startIndex: startIdx, endIndex: endIdx, objInfo: objConfig } = splitSize - - let partIndex = splitIndex + 1 // part index starts from 1. - const totalUploads = Array.from(startIdx) - - const headers = sourceObjList[splitIndex].getHeaders() - - totalUploads.forEach((splitStart, upldCtrIdx) => { - let splitEnd = endIdx[upldCtrIdx] - - const sourceObj = `${objConfig.Bucket}/${objConfig.Object}` - headers['x-amz-copy-source'] = `${sourceObj}` - headers['x-amz-copy-source-range'] = `bytes=${splitStart}-${splitEnd}` - - const uploadPartConfig = { - bucketName: destObjConfig.Bucket, - objectName: destObjConfig.Object, - uploadID: uploadId, - partNumber: partIndex, - headers: headers, - sourceObj: sourceObj, - } - - uploadPartConfigList.push(uploadPartConfig) - }) - }) - - return uploadPartConfigList - } - - const performUploadParts = (uploadId) => { - const uploadList = getUploadPartConfigList(uploadId) - - async.map(uploadList, me.uploadPartCopy.bind(me), (err, res) => { - if (err) { - return this.abortMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId, cb) - } - const partsDone = res.map((partCopy) => ({ etag: partCopy.etag, part: partCopy.part })) - return me.completeMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId, partsDone, cb) - }) - } - - const newUploadHeaders = destObjConfig.getHeaders() - - me.initiateNewMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, newUploadHeaders, (err, uploadId) => { - if (err) { - return cb(err, null) - } - performUploadParts(uploadId) - }) - }) - .catch((error) => 
{ - cb(error, null) - }) - } - selectObjectContent(bucketName, objectName, selectOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!_.isEmpty(selectOpts)) { - if (!isString(selectOpts.expression)) { - throw new TypeError('sqlExpression should be of type "string"') - } - if (!_.isEmpty(selectOpts.inputSerialization)) { - if (!isObject(selectOpts.inputSerialization)) { - throw new TypeError('inputSerialization should be of type "object"') - } - } else { - throw new TypeError('inputSerialization is required') - } - if (!_.isEmpty(selectOpts.outputSerialization)) { - if (!isObject(selectOpts.outputSerialization)) { - throw new TypeError('outputSerialization should be of type "object"') - } - } else { - throw new TypeError('outputSerialization is required') - } - } else { - throw new TypeError('valid select configuration is required') - } - - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - const method = 'POST' - let query = `select` - query += '&select-type=2' - - const config = [ - { - Expression: selectOpts.expression, - }, - { - ExpressionType: selectOpts.expressionType || 'SQL', - }, - { - InputSerialization: [selectOpts.inputSerialization], - }, - { - OutputSerialization: [selectOpts.outputSerialization], - }, - ] - - // Optional - if (selectOpts.requestProgress) { - config.push({ RequestProgress: selectOpts.requestProgress }) - } - // Optional - if (selectOpts.scanRange) { - config.push({ ScanRange: selectOpts.scanRange }) - } - - const builder = new xml2js.Builder({ - rootName: 'SelectObjectContentRequest', - renderOpts: { pretty: false }, - headless: true, - }) - const payload = builder.buildObject(config) - - this.makeRequest({ method, bucketName, objectName, query }, payload, [200], '', true, (e, 
response) => { - if (e) { - return cb(e) - } - - let selectResult - pipesetup(response, transformers.selectObjectContentTransformer()) - .on('data', (data) => { - selectResult = parseSelectObjectContentResponse(data) - }) - .on('error', cb) - .on('end', () => { - cb(null, selectResult) - }) - }) - } - - get extensions() { - if (!this.clientExtensions) { - this.clientExtensions = new extensions(this) - } - return this.clientExtensions - } -} - -// Promisify various public-facing APIs on the Client module. -Client.prototype.makeBucket = promisify(Client.prototype.makeBucket) -Client.prototype.listBuckets = promisify(Client.prototype.listBuckets) -Client.prototype.bucketExists = promisify(Client.prototype.bucketExists) -Client.prototype.removeBucket = promisify(Client.prototype.removeBucket) - -Client.prototype.getObject = promisify(Client.prototype.getObject) -Client.prototype.getPartialObject = promisify(Client.prototype.getPartialObject) -Client.prototype.fGetObject = promisify(Client.prototype.fGetObject) -Client.prototype.putObject = promisify(Client.prototype.putObject) -Client.prototype.fPutObject = promisify(Client.prototype.fPutObject) -Client.prototype.copyObject = promisify(Client.prototype.copyObject) -Client.prototype.statObject = promisify(Client.prototype.statObject) -Client.prototype.removeObject = promisify(Client.prototype.removeObject) -Client.prototype.removeObjects = promisify(Client.prototype.removeObjects) - -Client.prototype.presignedUrl = promisify(Client.prototype.presignedUrl) -Client.prototype.presignedGetObject = promisify(Client.prototype.presignedGetObject) -Client.prototype.presignedPutObject = promisify(Client.prototype.presignedPutObject) -Client.prototype.presignedPostPolicy = promisify(Client.prototype.presignedPostPolicy) -Client.prototype.getBucketNotification = promisify(Client.prototype.getBucketNotification) -Client.prototype.setBucketNotification = promisify(Client.prototype.setBucketNotification) 
-Client.prototype.removeAllBucketNotification = promisify(Client.prototype.removeAllBucketNotification) -Client.prototype.getBucketPolicy = promisify(Client.prototype.getBucketPolicy) -Client.prototype.setBucketPolicy = promisify(Client.prototype.setBucketPolicy) -Client.prototype.removeIncompleteUpload = promisify(Client.prototype.removeIncompleteUpload) -Client.prototype.getBucketVersioning = promisify(Client.prototype.getBucketVersioning) -Client.prototype.setBucketVersioning = promisify(Client.prototype.setBucketVersioning) -Client.prototype.setBucketTagging = promisify(Client.prototype.setBucketTagging) -Client.prototype.removeBucketTagging = promisify(Client.prototype.removeBucketTagging) -Client.prototype.getBucketTagging = promisify(Client.prototype.getBucketTagging) -Client.prototype.setObjectTagging = promisify(Client.prototype.setObjectTagging) -Client.prototype.removeObjectTagging = promisify(Client.prototype.removeObjectTagging) -Client.prototype.getObjectTagging = promisify(Client.prototype.getObjectTagging) -Client.prototype.setBucketLifecycle = promisify(Client.prototype.setBucketLifecycle) -Client.prototype.getBucketLifecycle = promisify(Client.prototype.getBucketLifecycle) -Client.prototype.removeBucketLifecycle = promisify(Client.prototype.removeBucketLifecycle) -Client.prototype.setObjectLockConfig = promisify(Client.prototype.setObjectLockConfig) -Client.prototype.getObjectLockConfig = promisify(Client.prototype.getObjectLockConfig) -Client.prototype.putObjectRetention = promisify(Client.prototype.putObjectRetention) -Client.prototype.getObjectRetention = promisify(Client.prototype.getObjectRetention) -Client.prototype.setBucketEncryption = promisify(Client.prototype.setBucketEncryption) -Client.prototype.getBucketEncryption = promisify(Client.prototype.getBucketEncryption) -Client.prototype.removeBucketEncryption = promisify(Client.prototype.removeBucketEncryption) -Client.prototype.setBucketReplication = 
promisify(Client.prototype.setBucketReplication) -Client.prototype.getBucketReplication = promisify(Client.prototype.getBucketReplication) -Client.prototype.removeBucketReplication = promisify(Client.prototype.removeBucketReplication) -Client.prototype.setObjectLegalHold = promisify(Client.prototype.setObjectLegalHold) -Client.prototype.getObjectLegalHold = promisify(Client.prototype.getObjectLegalHold) -Client.prototype.composeObject = promisify(Client.prototype.composeObject) -Client.prototype.selectObjectContent = promisify(Client.prototype.selectObjectContent) - -export class CopyConditions { - constructor() { - this.modified = '' - this.unmodified = '' - this.matchETag = '' - this.matchETagExcept = '' - } - - setModified(date) { - if (!(date instanceof Date)) { - throw new TypeError('date must be of type Date') - } - - this.modified = date.toUTCString() - } - - setUnmodified(date) { - if (!(date instanceof Date)) { - throw new TypeError('date must be of type Date') - } - - this.unmodified = date.toUTCString() - } - - setMatchETag(etag) { - this.matchETag = etag - } - - setMatchETagExcept(etag) { - this.matchETagExcept = etag - } -} - -// Build PostPolicy object that can be signed by presignedPostPolicy -export class PostPolicy { - constructor() { - this.policy = { - conditions: [], - } - this.formData = {} - } - - // set expiration date - setExpires(date) { - if (!date) { - throw new errors.InvalidDateError('Invalid date : cannot be null') - } - this.policy.expiration = date.toISOString() - } - - // set object name - setKey(objectName) { - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name : ${objectName}`) - } - this.policy.conditions.push(['eq', '$key', objectName]) - this.formData.key = objectName - } - - // set object name prefix, i.e policy allows any keys with this prefix - setKeyStartsWith(prefix) { - if (!isValidPrefix(prefix)) { - throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) - } - 
this.policy.conditions.push(['starts-with', '$key', prefix]) - this.formData.key = prefix - } - - // set bucket name - setBucket(bucketName) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError(`Invalid bucket name : ${bucketName}`) - } - this.policy.conditions.push(['eq', '$bucket', bucketName]) - this.formData.bucket = bucketName - } - - // set Content-Type - setContentType(type) { - if (!type) { - throw new Error('content-type cannot be null') - } - this.policy.conditions.push(['eq', '$Content-Type', type]) - this.formData['Content-Type'] = type - } - - // set Content-Type prefix, i.e image/ allows any image - setContentTypeStartsWith(prefix) { - if (!prefix) { - throw new Error('content-type cannot be null') - } - this.policy.conditions.push(['starts-with', '$Content-Type', prefix]) - this.formData['Content-Type'] = prefix - } - - // set Content-Disposition - setContentDisposition(value) { - if (!value) { - throw new Error('content-disposition cannot be null') - } - this.policy.conditions.push(['eq', '$Content-Disposition', value]) - this.formData['Content-Disposition'] = value - } - - // set minimum/maximum length of what Content-Length can be. 
- setContentLengthRange(min, max) { - if (min > max) { - throw new Error('min cannot be more than max') - } - if (min < 0) { - throw new Error('min should be > 0') - } - if (max < 0) { - throw new Error('max should be > 0') - } - this.policy.conditions.push(['content-length-range', min, max]) - } - - // set user defined metadata - setUserMetaData(metaData) { - if (!isObject(metaData)) { - throw new TypeError('metadata should be of type "object"') - } - Object.entries(metaData).forEach(([key, value]) => { - const amzMetaDataKey = `x-amz-meta-${key}` - this.policy.conditions.push(['eq', `$${amzMetaDataKey}`, value]) - this.formData[amzMetaDataKey] = value - }) - } -} - -export * from './helpers' -export * from './notification' diff --git a/src/main/notification.js b/src/main/notification.js deleted file mode 100644 index 03765be6..00000000 --- a/src/main/notification.js +++ /dev/null @@ -1,200 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2016 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { EventEmitter } from 'events' - -import { DEFAULT_REGION, pipesetup, uriEscape } from './helpers' -import * as transformers from './transformers' - -// Notification config - array of target configs. -// Target configs can be -// 1. Topic (simple notification service) -// 2. Queue (simple queue service) -// 3. 
CloudFront (lambda function) -export class NotificationConfig { - add(target) { - let instance = '' - if (target instanceof TopicConfig) { - instance = 'TopicConfiguration' - } - if (target instanceof QueueConfig) { - instance = 'QueueConfiguration' - } - if (target instanceof CloudFunctionConfig) { - instance = 'CloudFunctionConfiguration' - } - if (!this[instance]) { - this[instance] = [] - } - this[instance].push(target) - } -} - -// Base class for three supported configs. -class TargetConfig { - setId(id) { - this.Id = id - } - addEvent(newevent) { - if (!this.Event) { - this.Event = [] - } - this.Event.push(newevent) - } - addFilterSuffix(suffix) { - if (!this.Filter) { - this.Filter = { S3Key: { FilterRule: [] } } - } - this.Filter.S3Key.FilterRule.push({ Name: 'suffix', Value: suffix }) - } - addFilterPrefix(prefix) { - if (!this.Filter) { - this.Filter = { S3Key: { FilterRule: [] } } - } - this.Filter.S3Key.FilterRule.push({ Name: 'prefix', Value: prefix }) - } -} - -// 1. Topic (simple notification service) -export class TopicConfig extends TargetConfig { - constructor(arn) { - super() - this.Topic = arn - } -} - -// 2. Queue (simple queue service) -export class QueueConfig extends TargetConfig { - constructor(arn) { - super() - this.Queue = arn - } -} - -// 3. 
CloudFront (lambda function) -export class CloudFunctionConfig extends TargetConfig { - constructor(arn) { - super() - this.CloudFunction = arn - } -} - -export const buildARN = (partition, service, region, accountId, resource) => { - return 'arn:' + partition + ':' + service + ':' + region + ':' + accountId + ':' + resource -} - -export const ObjectCreatedAll = 's3:ObjectCreated:*' -export const ObjectCreatedPut = 's3:ObjectCreated:Put' -export const ObjectCreatedPost = 's3:ObjectCreated:Post' -export const ObjectCreatedCopy = 's3:ObjectCreated:Copy' -export const ObjectCreatedCompleteMultipartUpload = 's3:ObjectCreated:CompleteMultipartUpload' -export const ObjectRemovedAll = 's3:ObjectRemoved:*' -export const ObjectRemovedDelete = 's3:ObjectRemoved:Delete' -export const ObjectRemovedDeleteMarkerCreated = 's3:ObjectRemoved:DeleteMarkerCreated' -export const ObjectReducedRedundancyLostObject = 's3:ReducedRedundancyLostObject' - -// Poll for notifications, used in #listenBucketNotification. -// Listening constitutes repeatedly requesting s3 whether or not any -// changes have occurred. -export class NotificationPoller extends EventEmitter { - constructor(client, bucketName, prefix, suffix, events) { - super() - - this.client = client - this.bucketName = bucketName - this.prefix = prefix - this.suffix = suffix - this.events = events - - this.ending = false - } - - // Starts the polling. - start() { - this.ending = false - - process.nextTick(() => { - this.checkForChanges() - }) - } - - // Stops the polling. - stop() { - this.ending = true - } - - checkForChanges() { - // Don't continue if we're looping again but are cancelled. 
- if (this.ending) { - return - } - - let method = 'GET' - var queries = [] - if (this.prefix) { - var prefix = uriEscape(this.prefix) - queries.push(`prefix=${prefix}`) - } - if (this.suffix) { - var suffix = uriEscape(this.suffix) - queries.push(`suffix=${suffix}`) - } - if (this.events) { - this.events.forEach((s3event) => queries.push('events=' + uriEscape(s3event))) - } - queries.sort() - - var query = '' - if (queries.length > 0) { - query = `${queries.join('&')}` - } - const region = this.client.region || DEFAULT_REGION - this.client.makeRequest({ method, bucketName: this.bucketName, query }, '', [200], region, true, (e, response) => { - if (e) { - return this.emit('error', e) - } - - let transformer = transformers.getNotificationTransformer() - pipesetup(response, transformer) - .on('data', (result) => { - // Data is flushed periodically (every 5 seconds), so we should - // handle it after flushing from the JSON parser. - let records = result.Records - // If null (= no records), change to an empty array. - if (!records) { - records = [] - } - - // Iterate over the notifications and emit them individually. - records.forEach((record) => { - this.emit('notification', record) - }) - - // If we're done, stop. - if (this.ending) { - response.destroy() - } - }) - .on('error', (e) => this.emit('error', e)) - .on('end', () => { - // Do it again, if we haven't cancelled yet. - process.nextTick(() => { - this.checkForChanges() - }) - }) - }) - } -} diff --git a/src/main/object-uploader.js b/src/main/object-uploader.js deleted file mode 100644 index 6b0d26bf..00000000 --- a/src/main/object-uploader.js +++ /dev/null @@ -1,283 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2016 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import Crypto from 'crypto' -import * as querystring from 'query-string' -import { Transform } from 'stream' - -import { getVersionId, sanitizeETag } from './helpers' - -// We extend Transform because Writable does not implement ._flush(). -export default class ObjectUploader extends Transform { - constructor(client, bucketName, objectName, partSize, metaData, callback) { - super() - this.emptyStream = true - this.client = client - this.bucketName = bucketName - this.objectName = objectName - // The size of each multipart, chunked by BlockStream2. - this.partSize = partSize - // This is the metadata for the object. - this.metaData = metaData - - // Call like: callback(error, {etag, versionId}). - this.callback = callback - - // We need to keep track of what number chunk/part we're on. This increments - // each time _write() is called. Starts with 1, not 0. - this.partNumber = 1 - - // A list of the previously uploaded chunks, for resuming a file upload. This - // will be null if we aren't resuming an upload. - this.oldParts = null - - // Keep track of the etags for aggregating the chunks together later. Each - // etag represents a single chunk of the file. - this.etags = [] - - // This is for the multipart upload request — if null, we're either not initiated - // yet or we're flushing in one packet. - this.id = null - - // Handle errors. 
- this.on('error', (err) => { - callback(err) - }) - } - - _transform(chunk, encoding, callback) { - this.emptyStream = false - let method = 'PUT' - let headers = { 'Content-Length': chunk.length } - let md5digest = '' - - // Calculate and set Content-MD5 header if SHA256 is not set. - // This will happen only when there is a secure connection to the s3 server. - if (!this.client.enableSHA256) { - md5digest = Crypto.createHash('md5').update(chunk).digest() - headers['Content-MD5'] = md5digest.toString('base64') - } - // We can flush the object in one packet if it fits in one chunk. This is true - // if the chunk size is smaller than the part size, signifying the end of the - // stream. - if (this.partNumber == 1 && chunk.length < this.partSize) { - // PUT the chunk in a single request — use an empty query. - let options = { - method, - // Set user metadata as this is not a multipart upload - headers: Object.assign({}, this.metaData, headers), - query: '', - bucketName: this.bucketName, - objectName: this.objectName, - } - - this.client.makeRequest(options, chunk, [200], '', true, (err, response) => { - if (err) { - return callback(err) - } - let result = { - etag: sanitizeETag(response.headers.etag), - versionId: getVersionId(response.headers), - } - // Ignore the 'data' event so that the stream closes. (nodejs stream requirement) - response.on('data', () => {}) - - // Give the etag back, we're done! - - process.nextTick(() => { - this.callback(null, result) - }) - - // Because we're sure the stream has ended, allow it to flush and end. - callback() - }) - - return - } - - // If we aren't flushing in one packet, we need to initiate the multipart upload, - // if it hasn't already been done. The write will be buffered until the upload has been - // initiated. - if (this.id === null) { - this.once('ready', () => { - this._transform(chunk, encoding, callback) - }) - - // Check for an incomplete previous upload. 
- this.client.findUploadId(this.bucketName, this.objectName, (err, id) => { - if (err) { - return this.emit('error', err) - } - - // If no upload ID exists, initiate a new one. - if (!id) { - this.client.initiateNewMultipartUpload(this.bucketName, this.objectName, this.metaData, (err, id) => { - if (err) { - return callback(err) - } - - this.id = id - - // We are now ready to accept new chunks — this will flush the buffered chunk. - this.emit('ready') - }) - - return - } - - this.id = id - - // Retrieve the pre-uploaded parts, if we need to resume the upload. - this.client.listParts(this.bucketName, this.objectName, id, (err, etags) => { - if (err) { - return this.emit('error', err) - } - - // It is possible for no parts to be already uploaded. - if (!etags) { - etags = [] - } - - // oldParts will become an object, allowing oldParts[partNumber].etag - this.oldParts = etags.reduce(function (prev, item) { - if (!prev[item.part]) { - prev[item.part] = item - } - return prev - }, {}) - - this.emit('ready') - }) - }) - - return - } - - // Continue uploading various parts if we have initiated multipart upload. - let partNumber = this.partNumber++ - - // Check to see if we've already uploaded this chunk. If the hash sums match, - // we can skip to the next chunk. - if (this.oldParts) { - let oldPart = this.oldParts[partNumber] - - // Calulcate the md5 hash, if it has not already been calculated. - if (!md5digest) { - md5digest = Crypto.createHash('md5').update(chunk).digest() - } - - if (oldPart && md5digest.toString('hex') === oldPart.etag) { - // The md5 matches, the chunk has already been uploaded. - this.etags.push({ part: partNumber, etag: oldPart.etag }) - - callback() - return - } - } - - // Write the chunk with an uploader. 
- let query = querystring.stringify({ - partNumber: partNumber, - uploadId: this.id, - }) - - let options = { - method, - query, - headers, - bucketName: this.bucketName, - objectName: this.objectName, - } - - this.client.makeRequest(options, chunk, [200], '', true, (err, response) => { - if (err) { - return callback(err) - } - - // In order to aggregate the parts together, we need to collect the etags. - let etag = response.headers.etag - if (etag) { - etag = etag.replace(/^"/, '').replace(/"$/, '') - } - - this.etags.push({ part: partNumber, etag }) - - // Ignore the 'data' event so that the stream closes. (nodejs stream requirement) - response.on('data', () => {}) - - // We're ready for the next chunk. - callback() - }) - } - - _flush(callback) { - if (this.emptyStream) { - let method = 'PUT' - let headers = Object.assign({}, this.metaData, { 'Content-Length': 0 }) - let options = { - method, - headers, - query: '', - bucketName: this.bucketName, - objectName: this.objectName, - } - - this.client.makeRequest(options, '', [200], '', true, (err, response) => { - if (err) { - return callback(err) - } - - let result = { - etag: sanitizeETag(response.headers.etag), - versionId: getVersionId(response.headers), - } - - // Ignore the 'data' event so that the stream closes. (nodejs stream requirement) - response.on('data', () => {}) - - // Give the etag back, we're done! - process.nextTick(() => { - this.callback(null, result) - }) - - // Because we're sure the stream has ended, allow it to flush and end. - callback() - }) - - return - } - // If it has been uploaded in a single packet, we don't have to do anything. - if (this.id === null) { - return - } - - // This is called when all of the chunks uploaded successfully, thus - // completing the multipart upload. 
- this.client.completeMultipartUpload(this.bucketName, this.objectName, this.id, this.etags, (err, etag) => { - if (err) { - return callback(err) - } - - // Call our callback on the next tick to allow the streams infrastructure - // to finish what its doing before we continue. - process.nextTick(() => { - this.callback(null, etag) - }) - - callback() - }) - } -} diff --git a/src/main/s3-endpoints.js b/src/main/s3-endpoints.js deleted file mode 100644 index 60950de6..00000000 --- a/src/main/s3-endpoints.js +++ /dev/null @@ -1,50 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015, 2016 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { isString } from './helpers.js' - -// List of currently supported endpoints. 
-let awsS3Endpoint = { - 'us-east-1': 's3.amazonaws.com', - 'us-east-2': 's3-us-east-2.amazonaws.com', - 'us-west-1': 's3-us-west-1.amazonaws.com', - 'us-west-2': 's3-us-west-2.amazonaws.com', - 'ca-central-1': 's3.ca-central-1.amazonaws.com', - 'eu-west-1': 's3-eu-west-1.amazonaws.com', - 'eu-west-2': 's3-eu-west-2.amazonaws.com', - 'sa-east-1': 's3-sa-east-1.amazonaws.com', - 'eu-central-1': 's3-eu-central-1.amazonaws.com', - 'ap-south-1': 's3-ap-south-1.amazonaws.com', - 'ap-southeast-1': 's3-ap-southeast-1.amazonaws.com', - 'ap-southeast-2': 's3-ap-southeast-2.amazonaws.com', - 'ap-northeast-1': 's3-ap-northeast-1.amazonaws.com', - 'cn-north-1': 's3.cn-north-1.amazonaws.com.cn', - 'ap-east-1': 's3.ap-east-1.amazonaws.com', - 'eu-north-1': 's3.eu-north-1.amazonaws.com', - // Add new endpoints here. -} - -// getS3Endpoint get relevant endpoint for the region. -export function getS3Endpoint(region) { - if (!isString(region)) { - throw new TypeError(`Invalid region: ${region}`) - } - var endpoint = awsS3Endpoint[region] - if (endpoint) { - return endpoint - } - return 's3.amazonaws.com' -} diff --git a/src/main/signing.js b/src/main/signing.js deleted file mode 100644 index 07cc0c92..00000000 --- a/src/main/signing.js +++ /dev/null @@ -1,298 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2016 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import Crypto from 'crypto' -import _ from 'lodash' - -import * as errors from './errors.js' -import { getScope, isArray, isNumber, isObject, isString, makeDateLong, makeDateShort, uriEscape } from './helpers.js' - -const signV4Algorithm = 'AWS4-HMAC-SHA256' - -// getCanonicalRequest generate a canonical request of style. -// -// canonicalRequest = -// \n -// \n -// \n -// \n -// \n -// -// -function getCanonicalRequest(method, path, headers, signedHeaders, hashedPayload) { - if (!isString(method)) { - throw new TypeError('method should be of type "string"') - } - if (!isString(path)) { - throw new TypeError('path should be of type "string"') - } - if (!isObject(headers)) { - throw new TypeError('headers should be of type "object"') - } - if (!isArray(signedHeaders)) { - throw new TypeError('signedHeaders should be of type "array"') - } - if (!isString(hashedPayload)) { - throw new TypeError('hashedPayload should be of type "string"') - } - const headersArray = signedHeaders.reduce((acc, i) => { - // Trim spaces from the value (required by V4 spec) - const val = `${headers[i]}`.replace(/ +/g, ' ') - acc.push(`${i.toLowerCase()}:${val}`) - return acc - }, []) - - const requestResource = path.split('?')[0] - let requestQuery = path.split('?')[1] - if (!requestQuery) { - requestQuery = '' - } - - if (requestQuery) { - requestQuery = requestQuery - .split('&') - .sort() - .map((element) => (element.indexOf('=') === -1 ? 
element + '=' : element)) - .join('&') - } - - const canonical = [] - canonical.push(method.toUpperCase()) - canonical.push(requestResource) - canonical.push(requestQuery) - canonical.push(headersArray.join('\n') + '\n') - canonical.push(signedHeaders.join(';').toLowerCase()) - canonical.push(hashedPayload) - return canonical.join('\n') -} - -// generate a credential string -function getCredential(accessKey, region, requestDate, serviceName = 's3') { - if (!isString(accessKey)) { - throw new TypeError('accessKey should be of type "string"') - } - if (!isString(region)) { - throw new TypeError('region should be of type "string"') - } - if (!isObject(requestDate)) { - throw new TypeError('requestDate should be of type "object"') - } - return `${accessKey}/${getScope(region, requestDate, serviceName)}` -} - -// Returns signed headers array - alphabetically sorted -function getSignedHeaders(headers) { - if (!isObject(headers)) { - throw new TypeError('request should be of type "object"') - } - // Excerpts from @lsegal - https://github.com/aws/aws-sdk-js/issues/659#issuecomment-120477258 - // - // User-Agent: - // - // This is ignored from signing because signing this causes problems with generating pre-signed URLs - // (that are executed by other agents) or when customers pass requests through proxies, which may - // modify the user-agent. - // - // Content-Length: - // - // This is ignored from signing because generating a pre-signed URL should not provide a content-length - // constraint, specifically when vending a S3 pre-signed PUT URL. The corollary to this is that when - // sending regular requests (non-pre-signed), the signature contains a checksum of the body, which - // implicitly validates the payload length (since changing the number of bytes would change the checksum) - // and therefore this header is not valuable in the signature. 
- // - // Content-Type: - // - // Signing this header causes quite a number of problems in browser environments, where browsers - // like to modify and normalize the content-type header in different ways. There is more information - // on this in https://github.com/aws/aws-sdk-js/issues/244. Avoiding this field simplifies logic - // and reduces the possibility of future bugs - // - // Authorization: - // - // Is skipped for obvious reasons - - const ignoredHeaders = ['authorization', 'content-length', 'content-type', 'user-agent'] - return _.map(headers, (v, header) => header) - .filter((header) => ignoredHeaders.indexOf(header) === -1) - .sort() -} - -// returns the key used for calculating signature -function getSigningKey(date, region, secretKey, serviceName = 's3') { - if (!isObject(date)) { - throw new TypeError('date should be of type "object"') - } - if (!isString(region)) { - throw new TypeError('region should be of type "string"') - } - if (!isString(secretKey)) { - throw new TypeError('secretKey should be of type "string"') - } - const dateLine = makeDateShort(date) - let hmac1 = Crypto.createHmac('sha256', 'AWS4' + secretKey) - .update(dateLine) - .digest(), - hmac2 = Crypto.createHmac('sha256', hmac1).update(region).digest(), - hmac3 = Crypto.createHmac('sha256', hmac2).update(serviceName).digest() - return Crypto.createHmac('sha256', hmac3).update('aws4_request').digest() -} - -// returns the string that needs to be signed -function getStringToSign(canonicalRequest, requestDate, region, serviceName = 's3') { - if (!isString(canonicalRequest)) { - throw new TypeError('canonicalRequest should be of type "string"') - } - if (!isObject(requestDate)) { - throw new TypeError('requestDate should be of type "object"') - } - if (!isString(region)) { - throw new TypeError('region should be of type "string"') - } - const hash = Crypto.createHash('sha256').update(canonicalRequest).digest('hex') - const scope = getScope(region, requestDate, serviceName) - const 
stringToSign = [] - stringToSign.push(signV4Algorithm) - stringToSign.push(makeDateLong(requestDate)) - stringToSign.push(scope) - stringToSign.push(hash) - const signString = stringToSign.join('\n') - return signString -} - -// calculate the signature of the POST policy -export function postPresignSignatureV4(region, date, secretKey, policyBase64) { - if (!isString(region)) { - throw new TypeError('region should be of type "string"') - } - if (!isObject(date)) { - throw new TypeError('date should be of type "object"') - } - if (!isString(secretKey)) { - throw new TypeError('secretKey should be of type "string"') - } - if (!isString(policyBase64)) { - throw new TypeError('policyBase64 should be of type "string"') - } - const signingKey = getSigningKey(date, region, secretKey) - return Crypto.createHmac('sha256', signingKey).update(policyBase64).digest('hex').toLowerCase() -} - -// Returns the authorization header -export function signV4(request, accessKey, secretKey, region, requestDate, serviceName = 's3') { - if (!isObject(request)) { - throw new TypeError('request should be of type "object"') - } - if (!isString(accessKey)) { - throw new TypeError('accessKey should be of type "string"') - } - if (!isString(secretKey)) { - throw new TypeError('secretKey should be of type "string"') - } - if (!isString(region)) { - throw new TypeError('region should be of type "string"') - } - - if (!accessKey) { - throw new errors.AccessKeyRequiredError('accessKey is required for signing') - } - if (!secretKey) { - throw new errors.SecretKeyRequiredError('secretKey is required for signing') - } - - const sha256sum = request.headers['x-amz-content-sha256'] - - const signedHeaders = getSignedHeaders(request.headers) - const canonicalRequest = getCanonicalRequest(request.method, request.path, request.headers, signedHeaders, sha256sum) - const serviceIdentifier = serviceName || 's3' - const stringToSign = getStringToSign(canonicalRequest, requestDate, region, serviceIdentifier) - 
const signingKey = getSigningKey(requestDate, region, secretKey, serviceIdentifier) - const credential = getCredential(accessKey, region, requestDate, serviceIdentifier) - const signature = Crypto.createHmac('sha256', signingKey).update(stringToSign).digest('hex').toLowerCase() - - return `${signV4Algorithm} Credential=${credential}, SignedHeaders=${signedHeaders - .join(';') - .toLowerCase()}, Signature=${signature}` -} - -export function signV4ByServiceName(request, accessKey, secretKey, region, requestDate, serviceName = 's3') { - return signV4(request, accessKey, secretKey, region, requestDate, serviceName) -} -// returns a presigned URL string -export function presignSignatureV4(request, accessKey, secretKey, sessionToken, region, requestDate, expires) { - if (!isObject(request)) { - throw new TypeError('request should be of type "object"') - } - if (!isString(accessKey)) { - throw new TypeError('accessKey should be of type "string"') - } - if (!isString(secretKey)) { - throw new TypeError('secretKey should be of type "string"') - } - if (!isString(region)) { - throw new TypeError('region should be of type "string"') - } - - if (!accessKey) { - throw new errors.AccessKeyRequiredError('accessKey is required for presigning') - } - if (!secretKey) { - throw new errors.SecretKeyRequiredError('secretKey is required for presigning') - } - - if (!isNumber(expires)) { - throw new TypeError('expires should be of type "number"') - } - if (expires < 1) { - throw new errors.ExpiresParamError('expires param cannot be less than 1 seconds') - } - if (expires > 604800) { - throw new errors.ExpiresParamError('expires param cannot be greater than 7 days') - } - - const iso8601Date = makeDateLong(requestDate) - const signedHeaders = getSignedHeaders(request.headers) - const credential = getCredential(accessKey, region, requestDate) - const hashedPayload = 'UNSIGNED-PAYLOAD' - - const requestQuery = [] - requestQuery.push(`X-Amz-Algorithm=${signV4Algorithm}`) - 
requestQuery.push(`X-Amz-Credential=${uriEscape(credential)}`) - requestQuery.push(`X-Amz-Date=${iso8601Date}`) - requestQuery.push(`X-Amz-Expires=${expires}`) - requestQuery.push(`X-Amz-SignedHeaders=${uriEscape(signedHeaders.join(';').toLowerCase())}`) - if (sessionToken) { - requestQuery.push(`X-Amz-Security-Token=${uriEscape(sessionToken)}`) - } - - const resource = request.path.split('?')[0] - let query = request.path.split('?')[1] - if (query) { - query = query + '&' + requestQuery.join('&') - } else { - query = requestQuery.join('&') - } - - const path = resource + '?' + query - - const canonicalRequest = getCanonicalRequest(request.method, path, request.headers, signedHeaders, hashedPayload) - - const stringToSign = getStringToSign(canonicalRequest, requestDate, region) - const signingKey = getSigningKey(requestDate, region, secretKey) - const signature = Crypto.createHmac('sha256', signingKey).update(stringToSign).digest('hex').toLowerCase() - const presignedUrl = request.protocol + '//' + request.headers.host + path + `&X-Amz-Signature=${signature}` - return presignedUrl -} diff --git a/src/main/transformers.js b/src/main/transformers.js deleted file mode 100644 index b8645796..00000000 --- a/src/main/transformers.js +++ /dev/null @@ -1,262 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015, 2016 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import Crypto from 'crypto' -import JSONParser from 'json-stream' -import * as _ from 'lodash' -import Through2 from 'through2' - -import * as errors from './errors.js' -import { isFunction } from './helpers.js' -import * as xmlParsers from './xml-parsers.js' - -// getConcater returns a stream that concatenates the input and emits -// the concatenated output when 'end' has reached. If an optional -// parser function is passed upon reaching the 'end' of the stream, -// `parser(concatenated_data)` will be emitted. -export function getConcater(parser, emitError) { - var objectMode = false - var bufs = [] - - if (parser && !isFunction(parser)) { - throw new TypeError('parser should be of type "function"') - } - - if (parser) { - objectMode = true - } - - return Through2( - { objectMode }, - function (chunk, enc, cb) { - bufs.push(chunk) - cb() - }, - function (cb) { - if (emitError) { - cb(parser(Buffer.concat(bufs).toString())) - // cb(e) would mean we have to emit 'end' by explicitly calling this.push(null) - this.push(null) - return - } - if (bufs.length) { - if (parser) { - this.push(parser(Buffer.concat(bufs).toString())) - } else { - this.push(Buffer.concat(bufs)) - } - } - cb() - }, - ) -} - -// Generates an Error object depending on http statusCode and XML body -export function getErrorTransformer(response) { - var statusCode = response.statusCode - var code, message - if (statusCode === 301) { - code = 'MovedPermanently' - message = 'Moved Permanently' - } else if (statusCode === 307) { - code = 'TemporaryRedirect' - message = 'Are you using the correct endpoint URL?' 
- } else if (statusCode === 403) { - code = 'AccessDenied' - message = 'Valid and authorized credentials required' - } else if (statusCode === 404) { - code = 'NotFound' - message = 'Not Found' - } else if (statusCode === 405) { - code = 'MethodNotAllowed' - message = 'Method Not Allowed' - } else if (statusCode === 501) { - code = 'MethodNotAllowed' - message = 'Method Not Allowed' - } else { - code = 'UnknownError' - message = `${statusCode}` - } - - var headerInfo = {} - // A value created by S3 compatible server that uniquely identifies - // the request. - headerInfo.amzRequestid = response.headersSent ? response.getHeader('x-amz-request-id') : null - // A special token that helps troubleshoot API replies and issues. - headerInfo.amzId2 = response.headersSent ? response.getHeader('x-amz-id-2') : null - // Region where the bucket is located. This header is returned only - // in HEAD bucket and ListObjects response. - headerInfo.amzBucketRegion = response.headersSent ? response.getHeader('x-amz-bucket-region') : null - - return getConcater((xmlString) => { - let getError = () => { - // Message should be instantiated for each S3Errors. - var e = new errors.S3Error(message) - // S3 Error code. 
- e.code = code - _.each(headerInfo, (value, key) => { - e[key] = value - }) - return e - } - if (!xmlString) { - return getError() - } - let e - try { - e = xmlParsers.parseError(xmlString, headerInfo) - } catch (ex) { - return getError() - } - return e - }, true) -} - -// A through stream that calculates md5sum and sha256sum -export function getHashSummer(enableSHA256) { - var md5 = Crypto.createHash('md5') - var sha256 = Crypto.createHash('sha256') - - return Through2.obj( - function (chunk, enc, cb) { - if (enableSHA256) { - sha256.update(chunk) - } else { - md5.update(chunk) - } - cb() - }, - function (cb) { - var md5sum = '' - var sha256sum = '' - if (enableSHA256) { - sha256sum = sha256.digest('hex') - } else { - md5sum = md5.digest('base64') - } - var hashData = { md5sum, sha256sum } - this.push(hashData) - this.push(null) - cb() - }, - ) -} - -// Following functions return a stream object that parses XML -// and emits suitable Javascript objects. - -// Parses CopyObject response. -export function getCopyObjectTransformer() { - return getConcater(xmlParsers.parseCopyObject) -} - -// Parses listBuckets response. -export function getListBucketTransformer() { - return getConcater(xmlParsers.parseListBucket) -} - -// Parses listMultipartUploads response. -export function getListMultipartTransformer() { - return getConcater(xmlParsers.parseListMultipart) -} - -// Parses listParts response. -export function getListPartsTransformer() { - return getConcater(xmlParsers.parseListParts) -} - -// Parses initMultipartUpload response. -export function getInitiateMultipartTransformer() { - return getConcater(xmlParsers.parseInitiateMultipart) -} - -// Parses listObjects response. -export function getListObjectsTransformer() { - return getConcater(xmlParsers.parseListObjects) -} - -// Parses listObjects response. -export function getListObjectsV2Transformer() { - return getConcater(xmlParsers.parseListObjectsV2) -} - -// Parses listObjects with metadata response. 
-export function getListObjectsV2WithMetadataTransformer() { - return getConcater(xmlParsers.parseListObjectsV2WithMetadata) -} - -// Parses completeMultipartUpload response. -export function getCompleteMultipartTransformer() { - return getConcater(xmlParsers.parseCompleteMultipart) -} - -// Parses getBucketLocation response. -export function getBucketRegionTransformer() { - return getConcater(xmlParsers.parseBucketRegion) -} - -// Parses GET/SET BucketNotification response -export function getBucketNotificationTransformer() { - return getConcater(xmlParsers.parseBucketNotification) -} - -// Parses a notification. -export function getNotificationTransformer() { - // This will parse and return each object. - return new JSONParser() -} - -export function bucketVersioningTransformer() { - return getConcater(xmlParsers.parseBucketVersioningConfig) -} - -export function getTagsTransformer() { - return getConcater(xmlParsers.parseTagging) -} - -export function lifecycleTransformer() { - return getConcater(xmlParsers.parseLifecycleConfig) -} - -export function objectLockTransformer() { - return getConcater(xmlParsers.parseObjectLockConfig) -} - -export function objectRetentionTransformer() { - return getConcater(xmlParsers.parseObjectRetentionConfig) -} -export function bucketEncryptionTransformer() { - return getConcater(xmlParsers.parseBucketEncryptionConfig) -} - -export function replicationConfigTransformer() { - return getConcater(xmlParsers.parseReplicationConfig) -} - -export function objectLegalHoldTransformer() { - return getConcater(xmlParsers.parseObjectLegalHoldConfig) -} - -export function uploadPartTransformer() { - return getConcater(xmlParsers.uploadPartParser) -} -export function selectObjectContentTransformer() { - return getConcater() -} - -export function removeObjectsTransformer() { - return getConcater(xmlParsers.removeObjectsParser) -} diff --git a/src/main/xml-parsers.js b/src/main/xml-parsers.js deleted file mode 100644 index 4ec2d53f..00000000 
--- a/src/main/xml-parsers.js +++ /dev/null @@ -1,708 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -const { XMLParser } = require('fast-xml-parser') -const fxp = new XMLParser() -import _ from 'lodash' - -import * as errors from './errors.js' -import { - isObject, - parseXml, - readableStream, - RETENTION_VALIDITY_UNITS, - sanitizeETag, - sanitizeObjectKey, - SelectResults, - toArray, -} from './helpers' -var crc32 = require('buffer-crc32') - -// Parse XML and return information as Javascript types - -// parse error XML response -export function parseError(xml, headerInfo) { - var xmlErr = {} - var xmlObj = fxp.parse(xml) - if (xmlObj.Error) { - xmlErr = xmlObj.Error - } - - var e = new errors.S3Error() - _.each(xmlErr, (value, key) => { - e[key.toLowerCase()] = value - }) - - _.each(headerInfo, (value, key) => { - e[key] = value - }) - return e -} - -// parse XML response for copy object -export function parseCopyObject(xml) { - var result = { - etag: '', - lastModified: '', - } - - var xmlobj = parseXml(xml) - if (!xmlobj.CopyObjectResult) { - throw new errors.InvalidXMLError('Missing tag: "CopyObjectResult"') - } - xmlobj = xmlobj.CopyObjectResult - if (xmlobj.ETag) { - result.etag = xmlobj.ETag.replace(/^"/g, '') - .replace(/"$/g, '') - .replace(/^"/g, '') - .replace(/"$/g, '') - .replace(/^"/g, '') - .replace(/"$/g, '') - } - if 
(xmlobj.LastModified) { - result.lastModified = new Date(xmlobj.LastModified) - } - - return result -} - -// parse XML response for listing in-progress multipart uploads -export function parseListMultipart(xml) { - var result = { - uploads: [], - prefixes: [], - isTruncated: false, - } - - var xmlobj = parseXml(xml) - - if (!xmlobj.ListMultipartUploadsResult) { - throw new errors.InvalidXMLError('Missing tag: "ListMultipartUploadsResult"') - } - xmlobj = xmlobj.ListMultipartUploadsResult - if (xmlobj.IsTruncated) { - result.isTruncated = xmlobj.IsTruncated - } - if (xmlobj.NextKeyMarker) { - result.nextKeyMarker = xmlobj.NextKeyMarker - } - if (xmlobj.NextUploadIdMarker) { - result.nextUploadIdMarker = xmlobj.nextUploadIdMarker - } - - if (xmlobj.CommonPrefixes) { - toArray(xmlobj.CommonPrefixes).forEach((prefix) => { - result.prefixes.push({ prefix: sanitizeObjectKey(toArray(prefix.Prefix)[0]) }) - }) - } - - if (xmlobj.Upload) { - toArray(xmlobj.Upload).forEach((upload) => { - var key = upload.Key - var uploadId = upload.UploadId - var initiator = { id: upload.Initiator.ID, displayName: upload.Initiator.DisplayName } - var owner = { id: upload.Owner.ID, displayName: upload.Owner.DisplayName } - var storageClass = upload.StorageClass - var initiated = new Date(upload.Initiated) - result.uploads.push({ key, uploadId, initiator, owner, storageClass, initiated }) - }) - } - return result -} - -// parse XML response to list all the owned buckets -export function parseListBucket(xml) { - var result = [] - var xmlobj = parseXml(xml) - - if (!xmlobj.ListAllMyBucketsResult) { - throw new errors.InvalidXMLError('Missing tag: "ListAllMyBucketsResult"') - } - xmlobj = xmlobj.ListAllMyBucketsResult - - if (xmlobj.Buckets) { - if (xmlobj.Buckets.Bucket) { - toArray(xmlobj.Buckets.Bucket).forEach((bucket) => { - var name = bucket.Name - var creationDate = new Date(bucket.CreationDate) - result.push({ name, creationDate }) - }) - } - } - return result -} - -// parse XML response 
for bucket notification -export function parseBucketNotification(xml) { - var result = { - TopicConfiguration: [], - QueueConfiguration: [], - CloudFunctionConfiguration: [], - } - // Parse the events list - var genEvents = function (events) { - var result = [] - if (events) { - toArray(events).forEach((s3event) => { - result.push(s3event) - }) - } - return result - } - // Parse all filter rules - var genFilterRules = function (filters) { - var result = [] - if (filters) { - filters = toArray(filters) - if (filters[0].S3Key) { - filters[0].S3Key = toArray(filters[0].S3Key) - if (filters[0].S3Key[0].FilterRule) { - toArray(filters[0].S3Key[0].FilterRule).forEach((rule) => { - var Name = toArray(rule.Name)[0] - var Value = toArray(rule.Value)[0] - result.push({ Name, Value }) - }) - } - } - } - return result - } - - var xmlobj = parseXml(xml) - xmlobj = xmlobj.NotificationConfiguration - - // Parse all topic configurations in the xml - if (xmlobj.TopicConfiguration) { - toArray(xmlobj.TopicConfiguration).forEach((config) => { - var Id = toArray(config.Id)[0] - var Topic = toArray(config.Topic)[0] - var Event = genEvents(config.Event) - var Filter = genFilterRules(config.Filter) - result.TopicConfiguration.push({ Id, Topic, Event, Filter }) - }) - } - // Parse all topic configurations in the xml - if (xmlobj.QueueConfiguration) { - toArray(xmlobj.QueueConfiguration).forEach((config) => { - var Id = toArray(config.Id)[0] - var Queue = toArray(config.Queue)[0] - var Event = genEvents(config.Event) - var Filter = genFilterRules(config.Filter) - result.QueueConfiguration.push({ Id, Queue, Event, Filter }) - }) - } - // Parse all QueueConfiguration arrays - if (xmlobj.CloudFunctionConfiguration) { - toArray(xmlobj.CloudFunctionConfiguration).forEach((config) => { - var Id = toArray(config.Id)[0] - var CloudFunction = toArray(config.CloudFunction)[0] - var Event = genEvents(config.Event) - var Filter = genFilterRules(config.Filter) - result.CloudFunctionConfiguration.push({ 
Id, CloudFunction, Event, Filter }) - }) - } - - return result -} - -// parse XML response for bucket region -export function parseBucketRegion(xml) { - // return region information - return parseXml(xml).LocationConstraint -} - -// parse XML response for list parts of an in progress multipart upload -export function parseListParts(xml) { - var xmlobj = parseXml(xml) - var result = { - isTruncated: false, - parts: [], - marker: undefined, - } - if (!xmlobj.ListPartsResult) { - throw new errors.InvalidXMLError('Missing tag: "ListPartsResult"') - } - xmlobj = xmlobj.ListPartsResult - if (xmlobj.IsTruncated) { - result.isTruncated = xmlobj.IsTruncated - } - if (xmlobj.NextPartNumberMarker) { - result.marker = +toArray(xmlobj.NextPartNumberMarker)[0] - } - if (xmlobj.Part) { - toArray(xmlobj.Part).forEach((p) => { - var part = +toArray(p.PartNumber)[0] - var lastModified = new Date(p.LastModified) - var etag = p.ETag.replace(/^"/g, '') - .replace(/"$/g, '') - .replace(/^"/g, '') - .replace(/"$/g, '') - .replace(/^"/g, '') - .replace(/"$/g, '') - result.parts.push({ part, lastModified, etag }) - }) - } - return result -} - -// parse XML response when a new multipart upload is initiated -export function parseInitiateMultipart(xml) { - var xmlobj = parseXml(xml) - - if (!xmlobj.InitiateMultipartUploadResult) { - throw new errors.InvalidXMLError('Missing tag: "InitiateMultipartUploadResult"') - } - xmlobj = xmlobj.InitiateMultipartUploadResult - - if (xmlobj.UploadId) { - return xmlobj.UploadId - } - throw new errors.InvalidXMLError('Missing tag: "UploadId"') -} - -// parse XML response when a multipart upload is completed -export function parseCompleteMultipart(xml) { - var xmlobj = parseXml(xml).CompleteMultipartUploadResult - if (xmlobj.Location) { - var location = toArray(xmlobj.Location)[0] - var bucket = toArray(xmlobj.Bucket)[0] - var key = xmlobj.Key - var etag = xmlobj.ETag.replace(/^"/g, '') - .replace(/"$/g, '') - .replace(/^"/g, '') - .replace(/"$/g, '') - 
.replace(/^"/g, '') - .replace(/"$/g, '') - - return { location, bucket, key, etag } - } - // Complete Multipart can return XML Error after a 200 OK response - if (xmlobj.Code && xmlobj.Message) { - var errCode = toArray(xmlobj.Code)[0] - var errMessage = toArray(xmlobj.Message)[0] - return { errCode, errMessage } - } -} - -const formatObjInfo = (content, opts = {}) => { - let { Key, LastModified, ETag, Size, VersionId, IsLatest } = content - - if (!isObject(opts)) { - opts = {} - } - - const name = sanitizeObjectKey(toArray(Key)[0]) - const lastModified = new Date(toArray(LastModified)[0]) - const etag = sanitizeETag(toArray(ETag)[0]) - - return { - name, - lastModified, - etag, - size: Size, - versionId: VersionId, - isLatest: IsLatest, - isDeleteMarker: opts.IsDeleteMarker ? opts.IsDeleteMarker : false, - } -} - -// parse XML response for list objects in a bucket -export function parseListObjects(xml) { - var result = { - objects: [], - isTruncated: false, - } - let isTruncated = false - let nextMarker, nextVersionKeyMarker - const xmlobj = parseXml(xml) - - const parseCommonPrefixesEntity = (responseEntity) => { - if (responseEntity) { - toArray(responseEntity).forEach((commonPrefix) => { - result.objects.push({ prefix: sanitizeObjectKey(toArray(commonPrefix.Prefix)[0]), size: 0 }) - }) - } - } - - const listBucketResult = xmlobj.ListBucketResult - const listVersionsResult = xmlobj.ListVersionsResult - - if (listBucketResult) { - if (listBucketResult.IsTruncated) { - isTruncated = listBucketResult.IsTruncated - } - if (listBucketResult.Contents) { - toArray(listBucketResult.Contents).forEach((content) => { - const name = sanitizeObjectKey(toArray(content.Key)[0]) - const lastModified = new Date(toArray(content.LastModified)[0]) - const etag = sanitizeETag(toArray(content.ETag)[0]) - const size = content.Size - result.objects.push({ name, lastModified, etag, size }) - }) - } - - if (listBucketResult.NextMarker) { - nextMarker = listBucketResult.NextMarker - } - 
parseCommonPrefixesEntity(listBucketResult.CommonPrefixes) - } - - if (listVersionsResult) { - if (listVersionsResult.IsTruncated) { - isTruncated = listVersionsResult.IsTruncated - } - - if (listVersionsResult.Version) { - toArray(listVersionsResult.Version).forEach((content) => { - result.objects.push(formatObjInfo(content)) - }) - } - if (listVersionsResult.DeleteMarker) { - toArray(listVersionsResult.DeleteMarker).forEach((content) => { - result.objects.push(formatObjInfo(content, { IsDeleteMarker: true })) - }) - } - - if (listVersionsResult.NextKeyMarker) { - nextVersionKeyMarker = listVersionsResult.NextKeyMarker - } - if (listVersionsResult.NextVersionIdMarker) { - result.versionIdMarker = listVersionsResult.NextVersionIdMarker - } - parseCommonPrefixesEntity(listVersionsResult.CommonPrefixes) - } - - result.isTruncated = isTruncated - if (isTruncated) { - result.nextMarker = nextVersionKeyMarker || nextMarker - } - return result -} - -// parse XML response for list objects v2 in a bucket -export function parseListObjectsV2(xml) { - var result = { - objects: [], - isTruncated: false, - } - var xmlobj = parseXml(xml) - if (!xmlobj.ListBucketResult) { - throw new errors.InvalidXMLError('Missing tag: "ListBucketResult"') - } - xmlobj = xmlobj.ListBucketResult - if (xmlobj.IsTruncated) { - result.isTruncated = xmlobj.IsTruncated - } - if (xmlobj.NextContinuationToken) { - result.nextContinuationToken = xmlobj.NextContinuationToken - } - if (xmlobj.Contents) { - toArray(xmlobj.Contents).forEach((content) => { - var name = sanitizeObjectKey(toArray(content.Key)[0]) - var lastModified = new Date(content.LastModified) - var etag = sanitizeETag(content.ETag) - var size = content.Size - result.objects.push({ name, lastModified, etag, size }) - }) - } - if (xmlobj.CommonPrefixes) { - toArray(xmlobj.CommonPrefixes).forEach((commonPrefix) => { - result.objects.push({ prefix: sanitizeObjectKey(toArray(commonPrefix.Prefix)[0]), size: 0 }) - }) - } - return result -} - -// 
parse XML response for list objects v2 with metadata in a bucket -export function parseListObjectsV2WithMetadata(xml) { - var result = { - objects: [], - isTruncated: false, - } - var xmlobj = parseXml(xml) - if (!xmlobj.ListBucketResult) { - throw new errors.InvalidXMLError('Missing tag: "ListBucketResult"') - } - xmlobj = xmlobj.ListBucketResult - if (xmlobj.IsTruncated) { - result.isTruncated = xmlobj.IsTruncated - } - if (xmlobj.NextContinuationToken) { - result.nextContinuationToken = xmlobj.NextContinuationToken - } - - if (xmlobj.Contents) { - toArray(xmlobj.Contents).forEach((content) => { - var name = sanitizeObjectKey(content.Key) - var lastModified = new Date(content.LastModified) - var etag = sanitizeETag(content.ETag) - var size = content.Size - var metadata - if (content.UserMetadata != null) { - metadata = toArray(content.UserMetadata)[0] - } else { - metadata = null - } - result.objects.push({ name, lastModified, etag, size, metadata }) - }) - } - - if (xmlobj.CommonPrefixes) { - toArray(xmlobj.CommonPrefixes).forEach((commonPrefix) => { - result.objects.push({ prefix: sanitizeObjectKey(toArray(commonPrefix.Prefix)[0]), size: 0 }) - }) - } - return result -} - -export function parseBucketVersioningConfig(xml) { - var xmlObj = parseXml(xml) - return xmlObj.VersioningConfiguration -} - -export function parseTagging(xml) { - const xmlObj = parseXml(xml) - let result = [] - if (xmlObj.Tagging && xmlObj.Tagging.TagSet && xmlObj.Tagging.TagSet.Tag) { - const tagResult = xmlObj.Tagging.TagSet.Tag - // if it is a single tag convert into an array so that the return value is always an array. 
- if (isObject(tagResult)) { - result.push(tagResult) - } else { - result = tagResult - } - } - return result -} - -export function parseLifecycleConfig(xml) { - const xmlObj = parseXml(xml) - return xmlObj.LifecycleConfiguration -} - -export function parseObjectLockConfig(xml) { - const xmlObj = parseXml(xml) - let lockConfigResult = {} - if (xmlObj.ObjectLockConfiguration) { - lockConfigResult = { - objectLockEnabled: xmlObj.ObjectLockConfiguration.ObjectLockEnabled, - } - let retentionResp - if ( - xmlObj.ObjectLockConfiguration && - xmlObj.ObjectLockConfiguration.Rule && - xmlObj.ObjectLockConfiguration.Rule.DefaultRetention - ) { - retentionResp = xmlObj.ObjectLockConfiguration.Rule.DefaultRetention || {} - lockConfigResult.mode = retentionResp.Mode - } - if (retentionResp) { - const isUnitYears = retentionResp.Years - if (isUnitYears) { - lockConfigResult.validity = isUnitYears - lockConfigResult.unit = RETENTION_VALIDITY_UNITS.YEARS - } else { - lockConfigResult.validity = retentionResp.Days - lockConfigResult.unit = RETENTION_VALIDITY_UNITS.DAYS - } - } - return lockConfigResult - } -} - -export function parseObjectRetentionConfig(xml) { - const xmlObj = parseXml(xml) - const retentionConfig = xmlObj.Retention - - return { - mode: retentionConfig.Mode, - retainUntilDate: retentionConfig.RetainUntilDate, - } -} - -export function parseBucketEncryptionConfig(xml) { - let encConfig = parseXml(xml) - return encConfig -} -export function parseReplicationConfig(xml) { - const xmlObj = parseXml(xml) - const replicationConfig = { - ReplicationConfiguration: { - role: xmlObj.ReplicationConfiguration.Role, - rules: toArray(xmlObj.ReplicationConfiguration.Rule), - }, - } - return replicationConfig -} - -export function parseObjectLegalHoldConfig(xml) { - const xmlObj = parseXml(xml) - return xmlObj.LegalHold -} - -export function uploadPartParser(xml) { - const xmlObj = parseXml(xml) - const respEl = xmlObj.CopyPartResult - return respEl -} - -export function 
removeObjectsParser(xml) { - const xmlObj = parseXml(xml) - if (xmlObj.DeleteResult && xmlObj.DeleteResult.Error) { - // return errors as array always. as the response is object in case of single object passed in removeObjects - return toArray(xmlObj.DeleteResult.Error) - } - return [] -} - -export function parseSelectObjectContentResponse(res) { - // extractHeaderType extracts the first half of the header message, the header type. - function extractHeaderType(stream) { - const headerNameLen = Buffer.from(stream.read(1)).readUInt8() - const headerNameWithSeparator = Buffer.from(stream.read(headerNameLen)).toString() - const splitBySeparator = (headerNameWithSeparator || '').split(':') - const headerName = splitBySeparator.length >= 1 ? splitBySeparator[1] : '' - return headerName - } - - function extractHeaderValue(stream) { - const bodyLen = Buffer.from(stream.read(2)).readUInt16BE() - const bodyName = Buffer.from(stream.read(bodyLen)).toString() - return bodyName - } - - const selectResults = new SelectResults({}) // will be returned - - const responseStream = readableStream(res) // convert byte array to a readable responseStream - while (responseStream._readableState.length) { - // Top level responseStream read tracker. - let msgCrcAccumulator // accumulate from start of the message till the message crc start. - - const totalByteLengthBuffer = Buffer.from(responseStream.read(4)) - msgCrcAccumulator = crc32(totalByteLengthBuffer) - - const headerBytesBuffer = Buffer.from(responseStream.read(4)) - msgCrcAccumulator = crc32(headerBytesBuffer, msgCrcAccumulator) - - const calculatedPreludeCrc = msgCrcAccumulator.readInt32BE() // use it to check if any CRC mismatch in header itself. 
- - const preludeCrcBuffer = Buffer.from(responseStream.read(4)) // read 4 bytes i.e 4+4 =8 + 4 = 12 ( prelude + prelude crc) - msgCrcAccumulator = crc32(preludeCrcBuffer, msgCrcAccumulator) - - const totalMsgLength = totalByteLengthBuffer.readInt32BE() - const headerLength = headerBytesBuffer.readInt32BE() - const preludeCrcByteValue = preludeCrcBuffer.readInt32BE() - - if (preludeCrcByteValue !== calculatedPreludeCrc) { - // Handle Header CRC mismatch Error - throw new Error( - `Header Checksum Mismatch, Prelude CRC of ${preludeCrcByteValue} does not equal expected CRC of ${calculatedPreludeCrc}`, - ) - } - - const headers = {} - if (headerLength > 0) { - const headerBytes = Buffer.from(responseStream.read(headerLength)) - msgCrcAccumulator = crc32(headerBytes, msgCrcAccumulator) - const headerReaderStream = readableStream(headerBytes) - while (headerReaderStream._readableState.length) { - let headerTypeName = extractHeaderType(headerReaderStream) - headerReaderStream.read(1) // just read and ignore it. - headers[headerTypeName] = extractHeaderValue(headerReaderStream) - } - } - - let payloadStream - const payLoadLength = totalMsgLength - headerLength - 16 - if (payLoadLength > 0) { - const payLoadBuffer = Buffer.from(responseStream.read(payLoadLength)) - msgCrcAccumulator = crc32(payLoadBuffer, msgCrcAccumulator) - // read the checksum early and detect any mismatch so we can avoid unnecessary further processing. 
- const messageCrcByteValue = Buffer.from(responseStream.read(4)).readInt32BE() - const calculatedCrc = msgCrcAccumulator.readInt32BE() - // Handle message CRC Error - if (messageCrcByteValue !== calculatedCrc) { - throw new Error( - `Message Checksum Mismatch, Message CRC of ${messageCrcByteValue} does not equal expected CRC of ${calculatedCrc}`, - ) - } - payloadStream = readableStream(payLoadBuffer) - } - - const messageType = headers['message-type'] - - switch (messageType) { - case 'error': { - const errorMessage = headers['error-code'] + ':"' + headers['error-message'] + '"' - throw new Error(errorMessage) - } - case 'event': { - const contentType = headers['content-type'] - const eventType = headers['event-type'] - - switch (eventType) { - case 'End': { - selectResults.setResponse(res) - return selectResults - } - - case 'Records': { - const readData = payloadStream.read(payLoadLength) - selectResults.setRecords(readData) - break - } - - case 'Progress': - { - switch (contentType) { - case 'text/xml': { - const progressData = payloadStream.read(payLoadLength) - selectResults.setProgress(progressData.toString()) - break - } - default: { - const errorMessage = `Unexpected content-type ${contentType} sent for event-type Progress` - throw new Error(errorMessage) - } - } - } - break - case 'Stats': - { - switch (contentType) { - case 'text/xml': { - const statsData = payloadStream.read(payLoadLength) - selectResults.setStats(statsData.toString()) - break - } - default: { - const errorMessage = `Unexpected content-type ${contentType} sent for event-type Stats` - throw new Error(errorMessage) - } - } - } - break - default: { - // Continuation message: Not sure if it is supported. did not find a reference or any message in response. - // It does not have a payload. 
- const warningMessage = `Un implemented event detected ${messageType}.` - // eslint-disable-next-line no-console - console.warn(warningMessage) - } - } // eventType End - } // Event End - } // messageType End - } // Top Level Stream End -} diff --git a/src/test/functional/functional-tests.js b/src/test/functional/functional-tests.js deleted file mode 100644 index 3d2ec950..00000000 --- a/src/test/functional/functional-tests.js +++ /dev/null @@ -1,4731 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -const os = require('os') -const stream = require('stream') -const crypto = require('crypto') -const async = require('async') -const _ = require('lodash') -const fs = require('fs') -const http = require('http') -const https = require('https') -const url = require('url') -const chai = require('chai') -const assert = chai.assert -const superagent = require('superagent') -const uuid = require('uuid') -const splitFile = require('split-file') -const step = require('mocha-steps').step - -let helpers -try { - helpers = require('../../../dist/main/helpers') -} catch (err) { - helpers = require('minio/dist/main/helpers') -} - -let AssumeRoleProvider -try { - AssumeRoleProvider = require('../../../dist/main/AssumeRoleProvider') -} catch (err) { - AssumeRoleProvider = require('minio/dist/main/AssumeRoleProvider') -} -AssumeRoleProvider = AssumeRoleProvider.default - -let minio -try { - minio = require('../../../dist/main/minio') -} catch (err) { - minio = require('minio') -} - -const { getVersionId, isArray, CopyDestinationOptions, CopySourceOptions, removeDirAndFiles, DEFAULT_REGION } = helpers - -require('source-map-support').install() - -const isWindowsPlatform = process.platform === 'win32' - -describe('functional tests', function () { - this.timeout(30 * 60 * 1000) - var clientConfigParams = {} - var region_conf_env = process.env['MINIO_REGION'] - - if (process.env['SERVER_ENDPOINT']) { - var res = process.env['SERVER_ENDPOINT'].split(':') - clientConfigParams.endPoint = res[0] - clientConfigParams.port = parseInt(res[1]) - var access_Key_env = process.env['ACCESS_KEY'] - var secret_key_env = process.env['SECRET_KEY'] - - // If the user provides ENABLE_HTTPS, 1 = secure, anything else = unsecure. - // Otherwise default useSSL as true. 
- var enable_https_env = process.env['ENABLE_HTTPS'] - // Get the credentials from env vars, error out if they don't exist - if (access_Key_env) { - clientConfigParams.accessKey = access_Key_env - } else { - // eslint-disable-next-line no-console - console.error(`Error: ACCESS_KEY Environment variable is not set`) - process.exit(1) - } - if (secret_key_env) { - clientConfigParams.secretKey = secret_key_env - } else { - // eslint-disable-next-line no-console - console.error(`Error: SECRET_KEY Environment variable is not set`) - process.exit(1) - } - clientConfigParams.useSSL = enable_https_env == '1' - } else { - // If credentials aren't given, default to play.min.io. - clientConfigParams.endPoint = 'play.min.io' - clientConfigParams.port = 9000 - clientConfigParams.accessKey = 'Q3AM3UQ867SPQQA43P2F' - clientConfigParams.secretKey = 'zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG' - clientConfigParams.useSSL = true - } - const server_region = region_conf_env || DEFAULT_REGION - - clientConfigParams.region = server_region - // set the partSize to ensure multipart upload chunk size. - // if not set, putObject with stream data and undefined length will use about 500Mb chunkSize (5Tb/10000). - clientConfigParams.partSize = 64 * 1024 * 1024 - - // dataDir is falsy if we need to generate data on the fly. Otherwise, it will be - // a directory with files to read from, i.e. /mint/data. - var dataDir = process.env['MINT_DATA_DIR'] - - var client = new minio.Client(clientConfigParams) - var usEastConfig = clientConfigParams - usEastConfig.region = server_region - var clientUsEastRegion = new minio.Client(usEastConfig) - - var traceStream - // FUNCTIONAL_TEST_TRACE env variable contains the path to which trace - // will be logged. Set it to /dev/stdout log to the stdout. - var trace_func_test_file_path = process.env['FUNCTIONAL_TEST_TRACE'] - if (trace_func_test_file_path) { - // This is necessary for windows. 
- if (trace_func_test_file_path === 'process.stdout') { - traceStream = process.stdout - } else { - traceStream = fs.createWriteStream(trace_func_test_file_path, { flags: 'a' }) - } - traceStream.write('====================================\n') - client.traceOn(traceStream) - } - - var bucketName = 'minio-js-test-' + uuid.v4() - var objectName = uuid.v4() - - var _1byteObjectName = 'datafile-1-b' - var _1byte = dataDir ? fs.readFileSync(dataDir + '/' + _1byteObjectName) : Buffer.alloc(1, 0) - - var _100kbObjectName = 'datafile-100-kB' - var _100kb = dataDir ? fs.readFileSync(dataDir + '/' + _100kbObjectName) : Buffer.alloc(100 * 1024, 0) - var _100kbObjectNameCopy = _100kbObjectName + '-copy' - - var _100kbObjectBufferName = `${_100kbObjectName}.buffer` - var _MultiPath100kbObjectBufferName = `path/to/${_100kbObjectName}.buffer` - var _100kbmd5 = crypto.createHash('md5').update(_100kb).digest('hex') - var _100kb1kboffsetmd5 = crypto.createHash('md5').update(_100kb.slice(1024)).digest('hex') - - var _65mbObjectName = 'datafile-65-MB' - var _65mb = dataDir ? fs.readFileSync(dataDir + '/' + _65mbObjectName) : Buffer.alloc(65 * 1024 * 1024, 0) - var _65mbmd5 = crypto.createHash('md5').update(_65mb).digest('hex') - var _65mbObjectNameCopy = _65mbObjectName + '-copy' - - var _5mbObjectName = 'datafile-5-MB' - var _5mb = dataDir ? fs.readFileSync(dataDir + '/' + _5mbObjectName) : Buffer.alloc(5 * 1024 * 1024, 0) - var _5mbmd5 = crypto.createHash('md5').update(_5mb).digest('hex') - - // create new http agent to check requests release sockets - var httpAgent = (clientConfigParams.useSSL ? 
https : http).Agent({ keepAlive: true }) - client.setRequestOptions({ agent: httpAgent }) - var metaData = { - 'Content-Type': 'text/html', - 'Content-Language': 'en', - 'X-Amz-Meta-Testing': 1234, - randomstuff: 5678, - } - - var tmpDir = os.tmpdir() - - function readableStream(data) { - var s = new stream.Readable() - s._read = () => {} - s.push(data) - s.push(null) - return s - } - - before((done) => client.makeBucket(bucketName, server_region, done)) - after((done) => client.removeBucket(bucketName, done)) - - if (traceStream) { - after(() => { - client.traceOff() - if (trace_func_test_file_path !== 'process.stdout') { - traceStream.end() - } - }) - } - - describe('makeBucket with period and region', () => { - if (clientConfigParams.endPoint === 's3.amazonaws.com') { - step('makeBucket(bucketName, region, cb)_region:eu-central-1_', (done) => - client.makeBucket(`${bucketName}.sec.period`, 'eu-central-1', done), - ) - step('removeBucket(bucketName, cb)__', (done) => client.removeBucket(`${bucketName}.sec.period`, done)) - } - }) - - describe('listBuckets', () => { - step('listBuckets(cb)__', (done) => { - client.listBuckets((e, buckets) => { - if (e) { - return done(e) - } - if (_.find(buckets, { name: bucketName })) { - return done() - } - done(new Error('bucket not found')) - }) - }) - step('listBuckets()__', (done) => { - client - .listBuckets() - .then((buckets) => { - if (!_.find(buckets, { name: bucketName })) { - return done(new Error('bucket not found')) - } - }) - .then(() => done()) - .catch(done) - }) - }) - - describe('makeBucket with region', () => { - let isDifferentServerRegion = false - step(`makeBucket(bucketName, region, cb)_bucketName:${bucketName}-region, region:us-east-2_`, (done) => { - try { - clientUsEastRegion.makeBucket(`${bucketName}-region`, 'us-east-2', assert.fail) - } catch (e) { - isDifferentServerRegion = true - done() - } - }) - step(`makeBucket(bucketName, region, cb)_bucketName:${bucketName}-region, region:us-east-1_`, (done) 
=> { - if (!isDifferentServerRegion) { - clientUsEastRegion.makeBucket(`${bucketName}-region`, 'us-east-1', done) - } - done() - }) - step(`removeBucket(bucketName, cb)_bucketName:${bucketName}-region_`, (done) => { - if (!isDifferentServerRegion) { - clientUsEastRegion.removeBucket(`${bucketName}-region`, done) - } - done() - }) - step(`makeBucket(bucketName, region)_bucketName:${bucketName}-region, region:us-east-1_`, (done) => { - if (!isDifferentServerRegion) { - clientUsEastRegion.makeBucket(`${bucketName}-region`, 'us-east-1', (e) => { - if (e) { - // Some object storage servers like Azure, might not delete a bucket rightaway - // Add a sleep of 40 seconds and retry - setTimeout(() => { - clientUsEastRegion.makeBucket(`${bucketName}-region`, 'us-east-1', done) - }, 40 * 1000) - } else { - done() - } - }) - } - done() - }) - step(`removeBucket(bucketName)_bucketName:${bucketName}-region_`, (done) => { - if (!isDifferentServerRegion) { - clientUsEastRegion - .removeBucket(`${bucketName}-region`) - .then(() => done()) - .catch(done) - } - done() - }) - }) - - describe('bucketExists', () => { - step(`bucketExists(bucketName, cb)_bucketName:${bucketName}_`, (done) => client.bucketExists(bucketName, done)) - step(`bucketExists(bucketName, cb)_bucketName:${bucketName}random_`, (done) => { - client.bucketExists(bucketName + 'random', (e, exists) => { - if (e === null && !exists) { - return done() - } - done(new Error()) - }) - }) - step(`bucketExists(bucketName)_bucketName:${bucketName}_`, (done) => { - client - .bucketExists(bucketName) - .then(() => done()) - .catch(done) - }) - }) - - describe('removeBucket', () => { - step(`removeBucket(bucketName, cb)_bucketName:${bucketName}random_`, (done) => { - client.removeBucket(bucketName + 'random', (e) => { - if (e.code === 'NoSuchBucket') { - return done() - } - done(new Error()) - }) - }) - step(`makeBucket(bucketName, region)_bucketName:${bucketName}-region-1, region:us-east-1_`, (done) => { - client - 
.makeBucket(`${bucketName}-region-1`, '') - .then(() => client.removeBucket(`${bucketName}-region-1`)) - .then(() => done()) - .catch(done) - }) - }) - describe('tests for putObject getObject removeObject with multipath', function () { - step( - `putObject(bucketName, objectName, stream)_bucketName:${bucketName}, objectName:${_MultiPath100kbObjectBufferName}, stream:100Kib_`, - (done) => { - client - .putObject(bucketName, _MultiPath100kbObjectBufferName, _100kb) - .then(() => done()) - .catch(done) - }, - ) - - step( - `getObject(bucketName, objectName, callback)_bucketName:${bucketName}, objectName:${_MultiPath100kbObjectBufferName}_`, - (done) => { - var hash = crypto.createHash('md5') - client.getObject(bucketName, _MultiPath100kbObjectBufferName, (e, stream) => { - if (e) { - return done(e) - } - stream.on('data', (data) => hash.update(data)) - stream.on('error', done) - stream.on('end', () => { - if (hash.digest('hex') === _100kbmd5) { - return done() - } - done(new Error('content mismatch')) - }) - }) - }, - ) - - step( - `removeObject(bucketName, objectName)_bucketName:${bucketName}, objectName:${_MultiPath100kbObjectBufferName}_`, - (done) => { - client - .removeObject(bucketName, _MultiPath100kbObjectBufferName) - .then(() => done()) - .catch(done) - }, - ) - }) - describe('tests for putObject copyObject getObject getPartialObject statObject removeObject', function () { - var tmpFileUpload = `${tmpDir}/${_100kbObjectName}` - step( - `fPutObject(bucketName, objectName, filePath, metaData, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}, filePath: ${tmpFileUpload}_`, - (done) => { - fs.writeFileSync(tmpFileUpload, _100kb) - client.fPutObject(bucketName, _100kbObjectName, tmpFileUpload, done) - }, - ) - - step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => { - client.statObject(bucketName, _100kbObjectName, (e, stat) => { - if (e) { - return done(e) - } - // As metadata is not 
provided and there is no file extension, - // we default to 'application/octet-stream' as per `probeContentType` function - if (stat.metaData && stat.metaData['content-type'] !== 'application/octet-stream') { - return done(new Error('content-type mismatch')) - } - done() - }) - }) - - var tmpFileUploadWithExt = `${tmpDir}/${_100kbObjectName}.txt` - step( - `fPutObject(bucketName, objectName, filePath, metaData, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}, filePath: ${tmpFileUploadWithExt}, metaData:${metaData}_`, - (done) => { - fs.writeFileSync(tmpFileUploadWithExt, _100kb) - client.fPutObject(bucketName, _100kbObjectName, tmpFileUploadWithExt, metaData, done) - }, - ) - - step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => { - client.statObject(bucketName, _100kbObjectName, (e, stat) => { - if (e) { - return done(e) - } - // As metadata is provided, even though we have an extension, - // the `content-type` should be equal what was declared on the metadata - if (stat.metaData && stat.metaData['content-type'] !== 'text/html') { - return done(new Error('content-type mismatch')) - } else if (!stat.metaData) { - return done(new Error('no metadata present')) - } - done() - }) - }) - - step( - `fPutObject(bucketName, objectName, filePath, metaData, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}, filePath: ${tmpFileUploadWithExt}_`, - (done) => { - fs.writeFileSync(tmpFileUploadWithExt, _100kb) - client.fPutObject(bucketName, _100kbObjectName, tmpFileUploadWithExt, done) - }, - ) - - step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => { - client.statObject(bucketName, _100kbObjectName, (e, stat) => { - if (e) { - return done(e) - } - // As metadata is not provided but we have a file extension, - // we need to infer `content-type` from the file extension - if (stat.metaData && stat.metaData['content-type'] 
!== 'text/plain') { - return done(new Error('content-type mismatch')) - } - done() - }) - }) - - step( - `putObject(bucketName, objectName, stream, size, metaData, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}, stream:100kb, size:${_100kb.length}, metaData:${metaData}_`, - (done) => { - var stream = readableStream(_100kb) - client.putObject(bucketName, _100kbObjectName, stream, _100kb.length, metaData, done) - }, - ) - - step( - `putObject(bucketName, objectName, stream, size, metaData, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}, stream:100kb, size:${_100kb.length}_`, - (done) => { - var stream = readableStream(_100kb) - client.putObject(bucketName, _100kbObjectName, stream, _100kb.length, done) - }, - ) - - step( - `getObject(bucketName, objectName, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, - (done) => { - var hash = crypto.createHash('md5') - client.getObject(bucketName, _100kbObjectName, (e, stream) => { - if (e) { - return done(e) - } - stream.on('data', (data) => hash.update(data)) - stream.on('error', done) - stream.on('end', () => { - if (hash.digest('hex') === _100kbmd5) { - return done() - } - done(new Error('content mismatch')) - }) - }) - }, - ) - - step( - `putObject(bucketName, objectName, stream, callback)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, stream:100kb_`, - (done) => { - client.putObject(bucketName, _100kbObjectBufferName, _100kb, '', done) - }, - ) - - step( - `getObject(bucketName, objectName, callback)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}_`, - (done) => { - var hash = crypto.createHash('md5') - client.getObject(bucketName, _100kbObjectBufferName, (e, stream) => { - if (e) { - return done(e) - } - stream.on('data', (data) => hash.update(data)) - stream.on('error', done) - stream.on('end', () => { - if (hash.digest('hex') === _100kbmd5) { - return done() - } - done(new Error('content mismatch')) - }) - }) - }, - ) - - step( 
- `putObject(bucketName, objectName, stream, metaData)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, stream:100kb_, metaData:{}`, - (done) => { - client - .putObject(bucketName, _100kbObjectBufferName, _100kb, {}) - .then(() => done()) - .catch(done) - }, - ) - - step( - `getPartialObject(bucketName, objectName, offset, length, cb)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, offset:0, length=1024_`, - (done) => { - client - .getPartialObject(bucketName, _100kbObjectBufferName, 0, 1024) - .then((stream) => { - stream.on('data', function () {}) - stream.on('end', done) - }) - .catch(done) - }, - ) - - step( - `getPartialObject(bucketName, objectName, offset, length, cb)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, offset:1024, length=1024_`, - (done) => { - var expectedHash = crypto.createHash('md5').update(_100kb.slice(1024, 2048)).digest('hex') - var hash = crypto.createHash('md5') - client - .getPartialObject(bucketName, _100kbObjectBufferName, 1024, 1024) - .then((stream) => { - stream.on('data', (data) => hash.update(data)) - stream.on('end', () => { - if (hash.digest('hex') === expectedHash) { - return done() - } - done(new Error('content mismatch')) - }) - }) - .catch(done) - }, - ) - - step( - `getPartialObject(bucketName, objectName, offset, length, cb)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, offset:1024`, - (done) => { - var hash = crypto.createHash('md5') - client - .getPartialObject(bucketName, _100kbObjectBufferName, 1024) - .then((stream) => { - stream.on('data', (data) => hash.update(data)) - stream.on('end', () => { - if (hash.digest('hex') === _100kb1kboffsetmd5) { - return done() - } - done(new Error('content mismatch')) - }) - }) - .catch(done) - }, - ) - - step( - `getObject(bucketName, objectName)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}_`, - (done) => { - client - .getObject(bucketName, _100kbObjectBufferName) - .then((stream) => { - 
stream.on('data', function () {}) - stream.on('end', done) - }) - .catch(done) - }, - ) - - step( - `putObject(bucketName, objectName, stream, metadata, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, - (done) => { - var stream = readableStream(_65mb) - client.putObject(bucketName, _65mbObjectName, stream, metaData, () => { - setTimeout(() => { - if (Object.values(httpAgent.sockets).length === 0) { - return done() - } - done(new Error('http request did not release network socket')) - }, 100) - }) - }, - ) - - step(`getObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, (done) => { - var hash = crypto.createHash('md5') - client.getObject(bucketName, _65mbObjectName, (e, stream) => { - if (e) { - return done(e) - } - stream.on('data', (data) => hash.update(data)) - stream.on('error', done) - stream.on('end', () => { - if (hash.digest('hex') === _65mbmd5) { - return done() - } - done(new Error('content mismatch')) - }) - }) - }) - - step(`getObject(bucketName, objectName, cb)_bucketName:${bucketName} non-existent object`, (done) => { - client.getObject(bucketName, 'an-object-that-does-not-exist', (e, stream) => { - if (stream) { - return done(new Error('on errors the stream object should not exist')) - } - if (!e) { - return done(new Error('expected an error object')) - } - if (e.code !== 'NoSuchKey') { - return done(new Error('expected NoSuchKey error')) - } - done() - }) - }) - - step( - `getPartialObject(bucketName, objectName, offset, length, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}, offset:0, length:100*1024_`, - (done) => { - var hash = crypto.createHash('md5') - var expectedHash = crypto - .createHash('md5') - .update(_65mb.slice(0, 100 * 1024)) - .digest('hex') - client.getPartialObject(bucketName, _65mbObjectName, 0, 100 * 1024, (e, stream) => { - if (e) { - return done(e) - } - stream.on('data', (data) => hash.update(data)) - stream.on('error', done) - stream.on('end', () => { - if 
(hash.digest('hex') === expectedHash) { - return done() - } - done(new Error('content mismatch')) - }) - }) - }, - ) - - step( - `copyObject(bucketName, objectName, srcObject, cb)_bucketName:${bucketName}, objectName:${_65mbObjectNameCopy}, srcObject:/${bucketName}/${_65mbObjectName}_`, - (done) => { - client.copyObject(bucketName, _65mbObjectNameCopy, '/' + bucketName + '/' + _65mbObjectName, (e) => { - if (e) { - return done(e) - } - done() - }) - }, - ) - - step( - `copyObject(bucketName, objectName, srcObject)_bucketName:${bucketName}, objectName:${_65mbObjectNameCopy}, srcObject:/${bucketName}/${_65mbObjectName}_`, - (done) => { - client - .copyObject(bucketName, _65mbObjectNameCopy, '/' + bucketName + '/' + _65mbObjectName) - .then(() => done()) - .catch(done) - }, - ) - - step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, (done) => { - client.statObject(bucketName, _65mbObjectName, (e, stat) => { - if (e) { - return done(e) - } - if (stat.size !== _65mb.length) { - return done(new Error('size mismatch')) - } - if (`${metaData.randomstuff}` !== stat.metaData.randomstuff) { - return done(new Error('metadata "randomstuff" mismatch')) - } - if (`${metaData['X-Amz-Meta-Testing']}` !== stat.metaData['testing']) { - return done(new Error('metadata "testing" mismatch')) - } - if (`${metaData['Content-Type']}` !== stat.metaData['content-type']) { - return done(new Error('metadata "content-type" mismatch')) - } - if (`${metaData['Content-Language']}` !== stat.metaData['content-language']) { - return done(new Error('metadata "content-language" mismatch')) - } - done() - }) - }) - - step(`statObject(bucketName, objectName)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, (done) => { - client - .statObject(bucketName, _65mbObjectName) - .then((stat) => { - if (stat.size !== _65mb.length) { - return done(new Error('size mismatch')) - } - }) - .then(() => done()) - .catch(done) - }) - - 
step(`removeObject(bucketName, objectName)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => { - client - .removeObject(bucketName, _100kbObjectName) - .then(function () { - async.map( - [_100kbObjectBufferName, _65mbObjectName, _65mbObjectNameCopy], - (objectName, cb) => client.removeObject(bucketName, objectName, cb), - done, - ) - }) - .catch(done) - }) - }) - - describe('tests for copyObject statObject', function () { - var etag - var modifiedDate - step( - `putObject(bucketName, objectName, stream, metaData, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}, stream: 100kb, metaData:${metaData}_`, - (done) => { - client.putObject(bucketName, _100kbObjectName, _100kb, metaData, done) - }, - ) - - step( - `copyObject(bucketName, objectName, srcObject, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}, srcObject:/${bucketName}/${_100kbObjectName}_`, - (done) => { - client.copyObject(bucketName, _100kbObjectNameCopy, '/' + bucketName + '/' + _100kbObjectName, (e) => { - if (e) { - return done(e) - } - done() - }) - }, - ) - - step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => { - client.statObject(bucketName, _100kbObjectName, (e, stat) => { - if (e) { - return done(e) - } - if (stat.size !== _100kb.length) { - return done(new Error('size mismatch')) - } - assert.equal(stat.metaData['content-type'], metaData['Content-Type']) - assert.equal(stat.metaData['Testing'], metaData['Testing']) - assert.equal(stat.metaData['randomstuff'], metaData['randomstuff']) - etag = stat.etag - modifiedDate = stat.modifiedDate - done() - }) - }) - - step( - `copyObject(bucketName, objectName, srcObject, conditions, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}, srcObject:/${bucketName}/${_100kbObjectName}, conditions:ExceptIncorrectEtag_`, - (done) => { - var conds = new minio.CopyConditions() - conds.setMatchETagExcept('TestEtag') - 
client.copyObject(bucketName, _100kbObjectNameCopy, '/' + bucketName + '/' + _100kbObjectName, conds, (e) => { - if (e) { - return done(e) - } - done() - }) - }, - ) - - step( - `copyObject(bucketName, objectName, srcObject, conditions, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}, srcObject:/${bucketName}/${_100kbObjectName}, conditions:ExceptCorrectEtag_`, - (done) => { - var conds = new minio.CopyConditions() - conds.setMatchETagExcept(etag) - client - .copyObject(bucketName, _100kbObjectNameCopy, '/' + bucketName + '/' + _100kbObjectName, conds) - .then(() => { - done(new Error('CopyObject should have failed.')) - }) - .catch(() => done()) - }, - ) - - step( - `copyObject(bucketName, objectName, srcObject, conditions, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}, srcObject:/${bucketName}/${_100kbObjectName}, conditions:MatchCorrectEtag_`, - (done) => { - var conds = new minio.CopyConditions() - conds.setMatchETag(etag) - client.copyObject(bucketName, _100kbObjectNameCopy, '/' + bucketName + '/' + _100kbObjectName, conds, (e) => { - if (e) { - return done(e) - } - done() - }) - }, - ) - - step( - `copyObject(bucketName, objectName, srcObject, conditions, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}, srcObject:/${bucketName}/${_100kbObjectName}, conditions:MatchIncorrectEtag_`, - (done) => { - var conds = new minio.CopyConditions() - conds.setMatchETag('TestETag') - client - .copyObject(bucketName, _100kbObjectNameCopy, '/' + bucketName + '/' + _100kbObjectName, conds) - .then(() => { - done(new Error('CopyObject should have failed.')) - }) - .catch(() => done()) - }, - ) - - step( - `copyObject(bucketName, objectName, srcObject, conditions, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}, srcObject:/${bucketName}/${_100kbObjectName}, conditions:Unmodified since ${modifiedDate}`, - (done) => { - var conds = new minio.CopyConditions() - conds.setUnmodified(new Date(modifiedDate)) - 
client.copyObject(bucketName, _100kbObjectNameCopy, '/' + bucketName + '/' + _100kbObjectName, conds, (e) => { - if (e) { - return done(e) - } - done() - }) - }, - ) - - step( - `copyObject(bucketName, objectName, srcObject, conditions, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}, srcObject:/${bucketName}/${_100kbObjectName}, conditions:Unmodified since 2010-03-26T12:00:00Z_`, - (done) => { - var conds = new minio.CopyConditions() - conds.setUnmodified(new Date('2010-03-26T12:00:00Z')) - client - .copyObject(bucketName, _100kbObjectNameCopy, '/' + bucketName + '/' + _100kbObjectName, conds) - .then(() => { - done(new Error('CopyObject should have failed.')) - }) - .catch(() => done()) - }, - ) - - step( - `statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}_`, - (done) => { - client.statObject(bucketName, _100kbObjectNameCopy, (e, stat) => { - if (e) { - return done(e) - } - if (stat.size !== _100kb.length) { - return done(new Error('size mismatch')) - } - done() - }) - }, - ) - - step( - `removeObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}_`, - (done) => { - async.map( - [_100kbObjectName, _100kbObjectNameCopy], - (objectName, cb) => client.removeObject(bucketName, objectName, cb), - done, - ) - }, - ) - }) - - describe('listIncompleteUploads removeIncompleteUpload', () => { - step( - `initiateNewMultipartUpload(bucketName, objectName, metaData, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}, metaData:${metaData}`, - (done) => { - client.initiateNewMultipartUpload(bucketName, _65mbObjectName, metaData, done) - }, - ) - step( - `listIncompleteUploads(bucketName, prefix, recursive)_bucketName:${bucketName}, prefix:${_65mbObjectName}, recursive: true_`, - function (done) { - // MinIO's ListIncompleteUploads returns an empty list, so skip this on non-AWS. 
- // See: https://github.com/minio/minio/commit/75c43bfb6c4a2ace - let hostSkipList = ['s3.amazonaws.com'] - if (!hostSkipList.includes(client.host)) { - this.skip() - } - - var found = false - client - .listIncompleteUploads(bucketName, _65mbObjectName, true) - .on('error', (e) => done(e)) - .on('data', (data) => { - if (data.key === _65mbObjectName) { - found = true - } - }) - .on('end', () => { - if (found) { - return done() - } - done(new Error(`${_65mbObjectName} not found during listIncompleteUploads`)) - }) - }, - ) - step( - `listIncompleteUploads(bucketName, prefix, recursive)_bucketName:${bucketName}, recursive: true_`, - function (done) { - // MinIO's ListIncompleteUploads returns an empty list, so skip this on non-AWS. - // See: https://github.com/minio/minio/commit/75c43bfb6c4a2ace - let hostSkipList = ['s3.amazonaws.com'] - if (!hostSkipList.includes(client.host)) { - this.skip() - } - - var found = false - client - .listIncompleteUploads(bucketName, '', true) - .on('error', (e) => done(e)) - .on('data', (data) => { - if (data.key === _65mbObjectName) { - found = true - } - }) - .on('end', () => { - if (found) { - return done() - } - done(new Error(`${_65mbObjectName} not found during listIncompleteUploads`)) - }) - }, - ) - step(`removeIncompleteUploads(bucketName, prefix)_bucketName:${bucketName}, prefix:${_65mbObjectName}_`, (done) => { - client.removeIncompleteUpload(bucketName, _65mbObjectName).then(done).catch(done) - }) - }) - - describe('fPutObject fGetObject', function () { - var tmpFileUpload = `${tmpDir}/${_65mbObjectName}` - var tmpFileDownload = `${tmpDir}/${_65mbObjectName}.download` - - step( - `fPutObject(bucketName, objectName, filePath, callback)_bucketName:${bucketName}, objectName:${_65mbObjectName}, filePath:${tmpFileUpload}_`, - (done) => { - fs.writeFileSync(tmpFileUpload, _65mb) - client.fPutObject(bucketName, _65mbObjectName, tmpFileUpload, () => { - setTimeout(() => { - if (Object.values(httpAgent.sockets).length === 0) { - 
return done() - } - done(new Error('http request did not release network socket')) - }, 100) - }) - }, - ) - - step( - `fPutObject(bucketName, objectName, filePath, metaData, callback)_bucketName:${bucketName}, objectName:${_65mbObjectName}, filePath:${tmpFileUpload}, metaData: ${metaData}_`, - (done) => client.fPutObject(bucketName, _65mbObjectName, tmpFileUpload, metaData, done), - ) - step( - `fGetObject(bucketName, objectName, filePath, callback)_bucketName:${bucketName}, objectName:${_65mbObjectName}, filePath:${tmpFileDownload}_`, - (done) => { - client - .fGetObject(bucketName, _65mbObjectName, tmpFileDownload) - .then(() => { - var md5sum = crypto.createHash('md5').update(fs.readFileSync(tmpFileDownload)).digest('hex') - if (md5sum === _65mbmd5) { - return done() - } - return done(new Error('md5sum mismatch')) - }) - .catch(done) - }, - ) - - step( - `removeObject(bucketName, objectName, filePath, callback)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, - (done) => { - fs.unlinkSync(tmpFileDownload) - client - .removeObject(bucketName, _65mbObjectName) - .then(() => done()) - .catch(done) - }, - ) - - step( - `fPutObject(bucketName, objectName, filePath, metaData)_bucketName:${bucketName}, objectName:${_65mbObjectName}, filePath:${tmpFileUpload}_`, - (done) => { - client - .fPutObject(bucketName, _65mbObjectName, tmpFileUpload) - .then(() => done()) - .catch(done) - }, - ) - - step( - `fGetObject(bucketName, objectName, filePath)_bucketName:${bucketName}, objectName:${_65mbObjectName}, filePath:${tmpFileDownload}_`, - (done) => { - client - .fGetObject(bucketName, _65mbObjectName, tmpFileDownload) - .then(() => done()) - .catch(done) - }, - ) - - step( - `removeObject(bucketName, objectName, filePath, callback)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, - (done) => { - fs.unlinkSync(tmpFileUpload) - fs.unlinkSync(tmpFileDownload) - client.removeObject(bucketName, _65mbObjectName, done) - }, - ) - }) - 
describe('fGetObject-resume', () => { - var localFile = `${tmpDir}/${_5mbObjectName}` - var etag = '' - step( - `putObject(bucketName, objectName, stream, metaData, cb)_bucketName:${bucketName}, objectName:${_5mbObjectName}, stream:5mb_`, - (done) => { - var stream = readableStream(_5mb) - client - .putObject(bucketName, _5mbObjectName, stream, _5mb.length, {}) - .then((resp) => { - etag = resp - done() - }) - .catch(done) - }, - ) - step( - `fGetObject(bucketName, objectName, filePath, callback)_bucketName:${bucketName}, objectName:${_5mbObjectName}, filePath:${localFile}`, - (done) => { - var bufPart = Buffer.alloc(_100kb.length) - _5mb.copy(bufPart, 0, 0, _100kb.length) - var tmpFile = `${tmpDir}/${_5mbObjectName}.${etag}.part.minio` - // create a partial file - fs.writeFileSync(tmpFile, bufPart) - client - .fGetObject(bucketName, _5mbObjectName, localFile) - .then(() => { - var md5sum = crypto.createHash('md5').update(fs.readFileSync(localFile)).digest('hex') - if (md5sum === _5mbmd5) { - return done() - } - return done(new Error('md5sum mismatch')) - }) - .catch(done) - }, - ) - step( - `removeObject(bucketName, objectName, callback)_bucketName:${bucketName}, objectName:${_5mbObjectName}_`, - (done) => { - fs.unlinkSync(localFile) - client.removeObject(bucketName, _5mbObjectName, done) - }, - ) - }) - - describe('bucket policy', () => { - let policy = `{"Version":"2012-10-17","Statement":[{"Action":["s3:GetBucketLocation","s3:ListBucket"],"Effect":"Allow","Principal":{"AWS":["*"]},"Resource":["arn:aws:s3:::${bucketName}"],"Sid":""},{"Action":["s3:GetObject"],"Effect":"Allow","Principal":{"AWS":["*"]},"Resource":["arn:aws:s3:::${bucketName}/*"],"Sid":""}]}` - - step(`setBucketPolicy(bucketName, bucketPolicy, cb)_bucketName:${bucketName}, bucketPolicy:${policy}_`, (done) => { - client.setBucketPolicy(bucketName, policy, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - }) - - 
step(`getBucketPolicy(bucketName, cb)_bucketName:${bucketName}_`, (done) => { - client.getBucketPolicy(bucketName, (err, response) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - if (!response) { - return done(new Error(`policy is empty`)) - } - done() - }) - }) - }) - - describe('Test Remove Objects Response in case of Errors', () => { - // Since functional tests are run with root credentials, it is not implemented. - // Test steps - // ============= - // create a bucket - // add some objects - // create a user - // assign the readonly policy to the user - // use the new user credentials to call remove objects API - // verify the response - // assign the readwrite policy to the user - // call remove objects API - // verify the response - // response.Error is an array - // -[]- empty array indicates success for all objects - // Note: the response code is 200. so the consumer should inspect the response - // Sample Response format: - /** - * { - * Code: 'AccessDenied', - * Message: 'Access Denied.', - * Key: '1.png', - * VersionId: '' - * } - * - * or - * - * { - * Code: 'NoSuchVersion', - * Message: 'The specified version does not exist. 
(invalid UUID length: 9)', - * Key: '1.png', - * VersionId: 'test-v-is' - * } - */ - /* - let readOnlyPolicy ='{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Action":["s3:GetBucketLocation","s3:GetObject"],"Resource":["arn:aws:s3:::*"]}]}' - let readWritePolicy ='{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Action":["s3:*"],"Resource":["arn:aws:s3:::*"]}]}' - */ - }) - - describe('presigned operations', () => { - step( - `presignedPutObject(bucketName, objectName, expires, cb)_bucketName:${bucketName}, objectName:${_1byteObjectName}, expires: 1000_`, - (done) => { - client.presignedPutObject(bucketName, _1byteObjectName, 1000, (e, presignedUrl) => { - if (e) { - return done(e) - } - var transport = http - var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) - options.method = 'PUT' - options.headers = { - 'content-length': _1byte.length, - } - if (options.protocol === 'https:') { - transport = https - } - var request = transport.request(options, (response) => { - if (response.statusCode !== 200) { - return done(new Error(`error on put : ${response.statusCode}`)) - } - response.on('error', (e) => done(e)) - response.on('end', () => done()) - response.on('data', () => {}) - }) - request.on('error', (e) => done(e)) - request.write(_1byte) - request.end() - }) - }, - ) - - step( - `presignedPutObject(bucketName, objectName, expires)_bucketName:${bucketName}, objectName:${_1byteObjectName}, expires:-123_`, - (done) => { - // negative values should trigger an error - client - .presignedPutObject(bucketName, _1byteObjectName, -123) - .then(() => { - done(new Error('negative values should trigger an error')) - }) - .catch(() => done()) - }, - ) - - step( - `presignedPutObject(bucketName, objectName)_bucketName:${bucketName}, objectName:${_1byteObjectName}_`, - (done) => { - // Putting the same object should not cause any error - client - .presignedPutObject(bucketName, _1byteObjectName) - .then(() => done()) - 
.catch(done) - }, - ) - - step( - `presignedGetObject(bucketName, objectName, expires, cb)_bucketName:${bucketName}, objectName:${_1byteObjectName}, expires:1000_`, - (done) => { - client.presignedGetObject(bucketName, _1byteObjectName, 1000, (e, presignedUrl) => { - if (e) { - return done(e) - } - var transport = http - var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) - options.method = 'GET' - if (options.protocol === 'https:') { - transport = https - } - var request = transport.request(options, (response) => { - if (response.statusCode !== 200) { - return done(new Error(`error on put : ${response.statusCode}`)) - } - var error = null - response.on('error', (e) => done(e)) - response.on('end', () => done(error)) - response.on('data', (data) => { - if (data.toString() !== _1byte.toString()) { - error = new Error('content mismatch') - } - }) - }) - request.on('error', (e) => done(e)) - request.end() - }) - }, - ) - - step( - `presignedUrl(httpMethod, bucketName, objectName, expires, cb)_httpMethod:GET, bucketName:${bucketName}, objectName:${_1byteObjectName}, expires:1000_`, - (done) => { - client.presignedUrl('GET', bucketName, _1byteObjectName, 1000, (e, presignedUrl) => { - if (e) { - return done(e) - } - var transport = http - var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) - options.method = 'GET' - if (options.protocol === 'https:') { - transport = https - } - var request = transport.request(options, (response) => { - if (response.statusCode !== 200) { - return done(new Error(`error on put : ${response.statusCode}`)) - } - var error = null - response.on('error', (e) => done(e)) - response.on('end', () => done(error)) - response.on('data', (data) => { - if (data.toString() !== _1byte.toString()) { - error = new Error('content mismatch') - } - }) - }) - request.on('error', (e) => done(e)) - request.end() - }) - }, - ) - - step( - `presignedUrl(httpMethod, bucketName, objectName, 
expires, cb)_httpMethod:GET, bucketName:${bucketName}, objectName:${_1byteObjectName}, expires:86400, requestDate:StartOfDay_`, - (done) => { - var requestDate = new Date() - requestDate.setHours(0, 0, 0, 0) - client.presignedUrl('GET', bucketName, _1byteObjectName, 86400, requestDate, (e, presignedUrl) => { - if (e) { - return done(e) - } - var transport = http - var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) - options.method = 'GET' - if (options.protocol === 'https:') { - transport = https - } - var request = transport.request(options, (response) => { - if (response.statusCode !== 200) { - return done(new Error(`error on put : ${response.statusCode}`)) - } - var error = null - response.on('error', (e) => done(e)) - response.on('end', () => done(error)) - response.on('data', (data) => { - if (data.toString() !== _1byte.toString()) { - error = new Error('content mismatch') - } - }) - }) - request.on('error', (e) => done(e)) - request.end() - }) - }, - ) - - step( - `presignedGetObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_1byteObjectName}_`, - (done) => { - client.presignedGetObject(bucketName, _1byteObjectName, (e, presignedUrl) => { - if (e) { - return done(e) - } - var transport = http - var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) - options.method = 'GET' - if (options.protocol === 'https:') { - transport = https - } - var request = transport.request(options, (response) => { - if (response.statusCode !== 200) { - return done(new Error(`error on put : ${response.statusCode}`)) - } - var error = null - response.on('error', (e) => done(e)) - response.on('end', () => done(error)) - response.on('data', (data) => { - if (data.toString() !== _1byte.toString()) { - error = new Error('content mismatch') - } - }) - }) - request.on('error', (e) => done(e)) - request.end() - }) - }, - ) - - step( - `presignedGetObject(bucketName, objectName, 
expires)_bucketName:${bucketName}, objectName:this.does.not.exist, expires:2938_`, - (done) => { - client - .presignedGetObject(bucketName, 'this.does.not.exist', 2938) - .then(assert.fail) - .catch(() => done()) - }, - ) - - step( - `presignedGetObject(bucketName, objectName, expires, respHeaders, cb)_bucketName:${bucketName}, objectName:${_1byteObjectName}, expires:1000_`, - (done) => { - var respHeaders = { - 'response-content-type': 'text/html', - 'response-content-language': 'en', - 'response-expires': 'Sun, 07 Jun 2020 16:07:58 GMT', - 'response-cache-control': 'No-cache', - 'response-content-disposition': 'attachment; filename=testing.txt', - 'response-content-encoding': 'gzip', - } - client.presignedGetObject(bucketName, _1byteObjectName, 1000, respHeaders, (e, presignedUrl) => { - if (e) { - return done(e) - } - var transport = http - var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) - options.method = 'GET' - if (options.protocol === 'https:') { - transport = https - } - var request = transport.request(options, (response) => { - if (response.statusCode !== 200) { - return done(new Error(`error on get : ${response.statusCode}`)) - } - if (respHeaders['response-content-type'] !== response.headers['content-type']) { - return done(new Error(`content-type header mismatch`)) - } - if (respHeaders['response-content-language'] !== response.headers['content-language']) { - return done(new Error(`content-language header mismatch`)) - } - if (respHeaders['response-expires'] !== response.headers['expires']) { - return done(new Error(`expires header mismatch`)) - } - if (respHeaders['response-cache-control'] !== response.headers['cache-control']) { - return done(new Error(`cache-control header mismatch`)) - } - if (respHeaders['response-content-disposition'] !== response.headers['content-disposition']) { - return done(new Error(`content-disposition header mismatch`)) - } - if (respHeaders['response-content-encoding'] !== 
response.headers['content-encoding']) { - return done(new Error(`content-encoding header mismatch`)) - } - response.on('data', () => {}) - done() - }) - request.on('error', (e) => done(e)) - request.end() - }) - }, - ) - - step( - `presignedGetObject(bucketName, objectName, respHeaders, cb)_bucketName:${bucketName}, objectName:${_1byteObjectName}, contentDisposition special chars`, - (done) => { - var respHeaders = { - 'response-content-disposition': - 'attachment; filename="abc|"@#$%&/(<>)/=?!{[\']}+*-_:,;def.png"; filename*=UTF-8\'\'t&21st&20ng.png', - } - client.presignedGetObject(bucketName, _1byteObjectName, 1000, respHeaders, (e, presignedUrl) => { - if (e) { - return done(e) - } - var transport = http - var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) - options.method = 'GET' - if (options.protocol === 'https:') { - transport = https - } - var request = transport.request(options, (response) => { - if (response.statusCode !== 200) { - return done(new Error(`error on get : ${response.statusCode}`)) - } - if (respHeaders['response-content-disposition'] !== response.headers['content-disposition']) { - return done(new Error(`content-disposition header mismatch`)) - } - response.on('data', () => {}) - done() - }) - request.on('error', (e) => done(e)) - request.end() - }) - }, - ) - - step( - `presignedGetObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_1byteObjectName}, expires:86400, requestDate:StartOfDay_`, - (done) => { - var requestDate = new Date() - requestDate.setHours(0, 0, 0, 0) - client.presignedGetObject(bucketName, _1byteObjectName, 86400, {}, requestDate, (e, presignedUrl) => { - if (e) { - return done(e) - } - var transport = http - var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) - options.method = 'GET' - if (options.protocol === 'https:') { - transport = https - } - var request = transport.request(options, (response) => { - if 
(response.statusCode !== 200) { - return done(new Error(`error on put : ${response.statusCode}`)) - } - var error = null - response.on('error', (e) => done(e)) - response.on('end', () => done(error)) - response.on('data', (data) => { - if (data.toString() !== _1byte.toString()) { - error = new Error('content mismatch') - } - }) - }) - request.on('error', (e) => done(e)) - request.end() - }) - }, - ) - - step('presignedPostPolicy(postPolicy, cb)_postPolicy:expiresin10days_', (done) => { - var policy = client.newPostPolicy() - policy.setKey(_1byteObjectName) - policy.setBucket(bucketName) - var expires = new Date() - expires.setSeconds(24 * 60 * 60 * 10) - policy.setExpires(expires) - - client.presignedPostPolicy(policy, (e, data) => { - if (e) { - return done(e) - } - var req = superagent.post(data.postURL) - _.each(data.formData, (value, key) => req.field(key, value)) - req.attach('file', Buffer.from([_1byte]), 'test') - req.end(function (e) { - if (e) { - return done(e) - } - done() - }) - req.on('error', (e) => done(e)) - }) - }) - - step('presignedPostPolicy(postPolicy, cb)_postPolicy:setContentType', (done) => { - var policy = client.newPostPolicy() - policy.setKey(_1byteObjectName) - policy.setBucket(bucketName) - policy.setContentType('text/plain') - - client.presignedPostPolicy(policy, (e, data) => { - if (e) { - return done(e) - } - var req = superagent.post(data.postURL) - _.each(data.formData, (value, key) => req.field(key, value)) - req.attach('file', Buffer.from([_1byte]), 'test') - req.end(function (e) { - if (e) { - return done(e) - } - done() - }) - req.on('error', (e) => done(e)) - }) - }) - - step('presignedPostPolicy(postPolicy, cb)_postPolicy:setContentTypeStartsWith', (done) => { - var policy = client.newPostPolicy() - policy.setKey(_1byteObjectName) - policy.setBucket(bucketName) - policy.setContentTypeStartsWith('text/') - - client.presignedPostPolicy(policy, (e, data) => { - if (e) { - return done(e) - } - var req = 
superagent.post(data.postURL) - _.each(data.formData, (value, key) => req.field(key, value)) - req.attach('file', Buffer.from([_1byte]), 'test') - req.end(function (e) { - if (e) { - return done(e) - } - done() - }) - req.on('error', (e) => done(e)) - }) - }) - - step('presignedPostPolicy(postPolicy, cb)_postPolicy:setContentDisposition_inline', (done) => { - var policy = client.newPostPolicy() - var objectName = 'test-content-disposition' + uuid.v4() - policy.setKey(objectName) - policy.setBucket(bucketName) - policy.setContentDisposition('inline') - - client.presignedPostPolicy(policy, (e, data) => { - if (e) { - return done(e) - } - var req = superagent.post(data.postURL) - _.each(data.formData, (value, key) => req.field(key, value)) - req.attach('file', Buffer.from([_1byte]), 'test') - req.end(function (e) { - if (e) { - return done(e) - } - client.removeObject(bucketName, objectName, done) - }) - req.on('error', (e) => done(e)) - }) - }) - - step('presignedPostPolicy(postPolicy, cb)_postPolicy:setContentDisposition_attachment', (done) => { - var policy = client.newPostPolicy() - var objectName = 'test-content-disposition' + uuid.v4() - policy.setKey(objectName) - policy.setBucket(bucketName) - policy.setContentDisposition('attachment; filename= My* Docume! 
nt.json') - - client.presignedPostPolicy(policy, (e, data) => { - if (e) { - return done(e) - } - var req = superagent.post(data.postURL) - _.each(data.formData, (value, key) => req.field(key, value)) - req.attach('file', Buffer.from([_1byte]), 'test') - req.end(function (e) { - if (e) { - return done(e) - } - client.removeObject(bucketName, objectName, done) - }) - req.on('error', (e) => done(e)) - }) - }) - - step('presignedPostPolicy(postPolicy, cb)_postPolicy:setUserMetaData_', (done) => { - var policy = client.newPostPolicy() - var objectName = 'test-metadata' + uuid.v4() - policy.setKey(objectName) - policy.setBucket(bucketName) - policy.setUserMetaData({ - key: 'my-value', - anotherKey: 'another-value', - }) - - client.presignedPostPolicy(policy, (e, data) => { - if (e) { - return done(e) - } - var req = superagent.post(data.postURL) - _.each(data.formData, (value, key) => req.field(key, value)) - req.attach('file', Buffer.from([_1byte]), 'test') - req.end(function (e) { - if (e) { - return done(e) - } - client.removeObject(bucketName, objectName, done) - }) - req.on('error', (e) => done(e)) - }) - }) - - step('presignedPostPolicy(postPolicy)_postPolicy: null_', (done) => { - client - .presignedPostPolicy(null) - .then(() => { - done(new Error('null policy should fail')) - }) - .catch(() => done()) - }) - - step( - `presignedUrl(httpMethod, bucketName, objectName, expires, reqParams, cb)_httpMethod:GET, bucketName:${bucketName}, expires:1000_`, - (done) => { - client.presignedUrl('GET', bucketName, '', 1000, { prefix: 'data', 'max-keys': 1000 }, (e, presignedUrl) => { - if (e) { - return done(e) - } - var transport = http - var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) - options.method = 'GET' - options.headers = {} - var str = '' - if (options.protocol === 'https:') { - transport = https - } - var callback = function (response) { - if (response.statusCode !== 200) { - return done(new Error(`error on put : 
${response.statusCode}`)) - } - response.on('error', (e) => done(e)) - response.on('end', function () { - if (!str.match(`${_1byteObjectName}`)) { - return done(new Error('Listed object does not match the object in the bucket!')) - } - done() - }) - response.on('data', function (chunk) { - str += chunk - }) - } - var request = transport.request(options, callback) - request.end() - }) - }, - ) - - step( - `presignedUrl(httpMethod, bucketName, objectName, expires, cb)_httpMethod:DELETE, bucketName:${bucketName}, objectName:${_1byteObjectName}, expires:1000_`, - (done) => { - client.presignedUrl('DELETE', bucketName, _1byteObjectName, 1000, (e, presignedUrl) => { - if (e) { - return done(e) - } - var transport = http - var options = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) - options.method = 'DELETE' - options.headers = {} - if (options.protocol === 'https:') { - transport = https - } - var request = transport.request(options, (response) => { - if (response.statusCode !== 204) { - return done(new Error(`error on put : ${response.statusCode}`)) - } - response.on('error', (e) => done(e)) - response.on('end', () => done()) - response.on('data', () => {}) - }) - request.on('error', (e) => done(e)) - request.end() - }) - }, - ) - }) - - describe('listObjects', function () { - var listObjectPrefix = 'miniojsPrefix' - var listObjectsNum = 10 - var objArray = [] - var listArray = [] - var listPrefixArray = [] - - step( - `putObject(bucketName, objectName, stream, size, metaData, callback)_bucketName:${bucketName}, stream:1b, size:1_Create ${listObjectsNum} objects`, - (done) => { - _.times(listObjectsNum, (i) => objArray.push(`${listObjectPrefix}.${i}`)) - objArray = objArray.sort() - async.mapLimit( - objArray, - 20, - (objectName, cb) => client.putObject(bucketName, objectName, readableStream(_1byte), _1byte.length, {}, cb), - done, - ) - }, - ) - - step( - `listObjects(bucketName, prefix, recursive)_bucketName:${bucketName}, prefix: 
miniojsprefix, recursive:true_`, - (done) => { - client - .listObjects(bucketName, listObjectPrefix, true) - .on('error', done) - .on('end', () => { - if (_.isEqual(objArray, listPrefixArray)) { - return done() - } - return done(new Error(`listObjects lists ${listPrefixArray.length} objects, expected ${listObjectsNum}`)) - }) - .on('data', (data) => { - listPrefixArray.push(data.name) - }) - }, - ) - - step('listObjects(bucketName, prefix, recursive)_recursive:true_', (done) => { - try { - client.listObjects('', '', true).on('end', () => { - return done(new Error(`listObjects should throw exception when empty bucketname is passed`)) - }) - } catch (e) { - if (e.name === 'InvalidBucketNameError') { - done() - } else { - done(e) - } - } - }) - - step(`listObjects(bucketName, prefix, recursive)_bucketName:${bucketName}, recursive:false_`, (done) => { - listArray = [] - client - .listObjects(bucketName, '', false) - .on('error', done) - .on('end', () => { - if (_.isEqual(objArray, listArray)) { - return done() - } - return done(new Error(`listObjects lists ${listArray.length} objects, expected ${listObjectsNum}`)) - }) - .on('data', (data) => { - listArray.push(data.name) - }) - }) - - step( - `listObjectsV2(bucketName, prefix, recursive, startAfter)_bucketName:${bucketName}, recursive:true_`, - (done) => { - listArray = [] - client - .listObjectsV2(bucketName, '', true, '') - .on('error', done) - .on('end', () => { - if (_.isEqual(objArray, listArray)) { - return done() - } - return done(new Error(`listObjects lists ${listArray.length} objects, expected ${listObjectsNum}`)) - }) - .on('data', (data) => { - listArray.push(data.name) - }) - }, - ) - - step( - `listObjectsV2WithMetadata(bucketName, prefix, recursive, startAfter)_bucketName:${bucketName}, recursive:true_`, - (done) => { - listArray = [] - client.extensions - .listObjectsV2WithMetadata(bucketName, '', true, '') - .on('error', done) - .on('end', () => { - if (_.isEqual(objArray, listArray)) { - return 
done() - } - return done(new Error(`listObjects lists ${listArray.length} objects, expected ${listObjectsNum}`)) - }) - .on('data', (data) => { - listArray.push(data.name) - }) - }, - ) - - step( - `removeObject(bucketName, objectName, callback)_bucketName:${bucketName}_Remove ${listObjectsNum} objects`, - (done) => { - async.mapLimit(listArray, 20, (objectName, cb) => client.removeObject(bucketName, objectName, cb), done) - }, - ) - }) - - describe('removeObjects', function () { - var listObjectPrefix = 'miniojsPrefix' - var listObjectsNum = 10 - var objArray = [] - var objectsList = [] - - step( - `putObject(bucketName, objectName, stream, size, contentType, callback)_bucketName:${bucketName}, stream:1b, size:1_Create ${listObjectsNum} objects`, - (done) => { - _.times(listObjectsNum, (i) => objArray.push(`${listObjectPrefix}.${i}`)) - objArray = objArray.sort() - async.mapLimit( - objArray, - 20, - (objectName, cb) => client.putObject(bucketName, objectName, readableStream(_1byte), _1byte.length, '', cb), - done, - ) - }, - ) - - step(`listObjects(bucketName, prefix, recursive)_bucketName:${bucketName}, recursive:false_`, (done) => { - client - .listObjects(bucketName, listObjectPrefix, false) - .on('error', done) - .on('end', () => { - try { - client.removeObjects(bucketName, '', function (e) { - if (e) { - done() - } - }) - } catch (e) { - if (e.name === 'InvalidArgumentError') { - done() - } - } - }) - .on('data', (data) => { - objectsList.push(data.name) - }) - }) - - objectsList = [] - - step(`listObjects(bucketName, prefix, recursive)_bucketName:${bucketName}, recursive:false_`, (done) => { - client - .listObjects(bucketName, listObjectPrefix, false) - .on('error', done) - .on('end', () => { - client.removeObjects(bucketName, objectsList, function (e) { - if (e) { - done(e) - } - done() - }) - }) - .on('data', (data) => { - objectsList.push(data.name) - }) - }) - - // Non latin characters - step(`putObject(bucketName, objectName, 
stream)_bucketName:${bucketName}, objectName:fileΩ, stream:1b`, (done) => { - client - .putObject(bucketName, 'fileΩ', _1byte) - .then(() => done()) - .catch(done) - }) - - step(`removeObjects with non latin characters`, (done) => { - client - .removeObjects(bucketName, ['fileΩ']) - .then(() => done()) - .catch(done) - }) - }) - - describe('bucket notifications', () => { - describe('#listenBucketNotification', () => { - before(function () { - // listenBucketNotification only works on MinIO, so skip if - // the host is Amazon. - let hostSkipList = ['s3.amazonaws.com'] - if (hostSkipList.includes(client.host)) { - this.skip() - } - }) - - step( - `listenBucketNotification(bucketName, prefix, suffix, events)_bucketName:${bucketName}, prefix:photos/, suffix:.jpg, events:bad_`, - (done) => { - let poller = client.listenBucketNotification(bucketName, 'photos/', '.jpg', ['bad']) - poller.on('error', (error) => { - if (error.code !== 'NotImplemented') { - assert.match(error.message, /A specified event is not supported for notifications./) - assert.equal(error.code, 'InvalidArgument') - } - done() - }) - }, - ) - step( - `listenBucketNotification(bucketName, prefix, suffix, events)_bucketName:${bucketName}, events: s3:ObjectCreated:*_`, - (done) => { - let poller = client.listenBucketNotification(bucketName, '', '', ['s3:ObjectCreated:*']) - let records = 0 - let pollerError = null - poller.on('notification', (record) => { - records++ - - assert.equal(record.eventName, 's3:ObjectCreated:Put') - assert.equal(record.s3.bucket.name, bucketName) - assert.equal(record.s3.object.key, objectName) - }) - poller.on('error', (error) => { - pollerError = error - }) - setTimeout(() => { - // Give it some time for the notification to be setup. 
- if (pollerError) { - if (pollerError.code !== 'NotImplemented') { - done(pollerError) - } else { - done() - } - return - } - client.putObject(bucketName, objectName, 'stringdata', (err) => { - if (err) { - return done(err) - } - setTimeout(() => { - // Give it some time to get the notification. - poller.stop() - client.removeObject(bucketName, objectName, (err) => { - if (err) { - return done(err) - } - if (!records) { - return done(new Error('notification not received')) - } - done() - }) - }, 10 * 1000) - }) - }, 10 * 1000) - }, - ) - - // This test is very similar to that above, except it does not include - // Minio.ObjectCreatedAll in the config. Thus, no events should be emitted. - step( - `listenBucketNotification(bucketName, prefix, suffix, events)_bucketName:${bucketName}, events:s3:ObjectRemoved:*`, - (done) => { - let poller = client.listenBucketNotification(bucketName, '', '', ['s3:ObjectRemoved:*']) - poller.on('notification', assert.fail) - poller.on('error', (error) => { - if (error.code !== 'NotImplemented') { - done(error) - } - }) - - client.putObject(bucketName, objectName, 'stringdata', (err) => { - if (err) { - return done(err) - } - // It polls every five seconds, so wait for two-ish polls, then end. - setTimeout(() => { - poller.stop() - poller.removeAllListeners('notification') - // clean up object now - client.removeObject(bucketName, objectName, done) - }, 11 * 1000) - }) - }, - ) - }) - }) - - describe('Bucket Versioning API', () => { - // Isolate the bucket/object for easy debugging and tracking. 
- const versionedBucketName = 'minio-js-test-version-' + uuid.v4() - before((done) => client.makeBucket(versionedBucketName, '', done)) - after((done) => client.removeBucket(versionedBucketName, done)) - - describe('Versioning Steps test', function () { - step('Check if versioning is enabled on a bucket', (done) => { - client.getBucketVersioning(versionedBucketName, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - }) - step('Enable versioning on a bucket', (done) => { - client.setBucketVersioning(versionedBucketName, { Status: 'Enabled' }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - }) - - step('Suspend versioning on a bucket', (done) => { - client.setBucketVersioning(versionedBucketName, { Status: 'Suspended' }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - }) - - step('Check if versioning is Suspended on a bucket', (done) => { - client.getBucketVersioning(versionedBucketName, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - }) - }) - }) - - describe('Versioning tests on a buckets', function () { - // Isolate the bucket/object for easy debugging and tracking. - const versionedBucketName = 'minio-js-test-version-' + uuid.v4() - const versioned_100kbObjectName = 'datafile-100-kB' - const versioned_100kb_Object = dataDir - ? 
fs.readFileSync(dataDir + '/' + versioned_100kbObjectName) - : Buffer.alloc(100 * 1024, 0) - - before((done) => client.makeBucket(versionedBucketName, '', done)) - after((done) => client.removeBucket(versionedBucketName, done)) - - describe('Versioning Steps test', function () { - let versionId - - step( - `setBucketVersioning(bucketName, versionConfig):_bucketName:${versionedBucketName},versionConfig:{Status:"Enabled"} `, - (done) => { - client.setBucketVersioning(versionedBucketName, { Status: 'Enabled' }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - }, - ) - - step( - `putObject(bucketName, objectName, stream)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}, stream:100Kib_`, - (done) => { - client - .putObject(versionedBucketName, versioned_100kbObjectName, versioned_100kb_Object) - .then(() => done()) - .catch(done) - }, - ) - - step( - `statObject(bucketName, objectName, statOpts)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}`, - (done) => { - client.statObject(versionedBucketName, versioned_100kbObjectName, {}, (e, res) => { - versionId = res.versionId - done() - }) - }, - ) - - step( - `removeObject(bucketName, objectName, removeOpts)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}`, - (done) => { - client.removeObject(versionedBucketName, versioned_100kbObjectName, { versionId: versionId }, () => { - done() - }) - }, - ) - - step( - `setBucketVersioning(bucketName, versionConfig):_bucketName:${versionedBucketName},versionConfig:{Status:"Suspended"}`, - (done) => { - client.setBucketVersioning(versionedBucketName, { Status: 'Suspended' }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - }, - ) - }) - }) - - describe('Versioning tests on a buckets: getObject, fGetObject, getPartialObject, putObject, removeObject with 
versionId support', function () { - // Isolate the bucket/object for easy debugging and tracking. - const versionedBucketName = 'minio-js-test-version-' + uuid.v4() - const versioned_100kbObjectName = 'datafile-100-kB' - const versioned_100kb_Object = dataDir - ? fs.readFileSync(dataDir + '/' + versioned_100kbObjectName) - : Buffer.alloc(100 * 1024, 0) - - before((done) => client.makeBucket(versionedBucketName, '', done)) - after((done) => client.removeBucket(versionedBucketName, done)) - - describe('Versioning Test for getObject, getPartialObject, putObject, removeObject with versionId support', function () { - let versionId = null - step( - `Enable Versioning on Bucket: setBucketVersioning(bucketName,versioningConfig)_bucketName:${versionedBucketName},{Status:"Enabled"}`, - (done) => { - client.setBucketVersioning(versionedBucketName, { Status: 'Enabled' }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - }, - ) - - step( - `putObject(bucketName, objectName, stream)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}, stream:100Kib_`, - (done) => { - client - .putObject(versionedBucketName, versioned_100kbObjectName, versioned_100kb_Object) - .then((res = {}) => { - if (res.versionId) { - versionId = res.versionId // In gateway mode versionId will not be returned. 
- } - done() - }) - .catch(done) - }, - ) - - step( - `getObject(bucketName, objectName, getOpts)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}`, - (done) => { - if (versionId) { - client.getObject( - versionedBucketName, - versioned_100kbObjectName, - { versionId: versionId }, - function (e, dataStream) { - const objVersion = getVersionId(dataStream.headers) - if (objVersion) { - done() - } else { - done(new Error('versionId not found in getObject response')) - } - }, - ) - } else { - done() - } - }, - ) - - step( - `fGetObject(bucketName, objectName, filePath, getOpts={})_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}`, - (done) => { - if (versionId) { - var tmpFileDownload = `${tmpDir}/${versioned_100kbObjectName}.download` - client.fGetObject( - versionedBucketName, - versioned_100kbObjectName, - tmpFileDownload, - { versionId: versionId }, - function () { - done() - }, - ) - } else { - done() - } - }, - ) - - step( - `getPartialObject(bucketName, objectName, offset, length, getOpts)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}`, - (done) => { - if (versionId) { - client.getPartialObject( - versionedBucketName, - versioned_100kbObjectName, - 10, - 30, - { versionId: versionId }, - function (e, dataStream) { - const objVersion = getVersionId(dataStream.headers) - if (objVersion) { - done() - } else { - done(new Error('versionId not found in getPartialObject response')) - } - }, - ) - } else { - done() - } - }, - ) - - step( - `removeObject(bucketName, objectName, removeOpts)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName},removeOpts:{versionId:${versionId}`, - (done) => { - if (versionId) { - client.removeObject(versionedBucketName, versioned_100kbObjectName, { versionId: versionId }, () => { - done() - }) - } else { - // In gateway mode, use regular delete to remove an object so that the bucket can be cleaned up. 
- client.removeObject(versionedBucketName, versioned_100kbObjectName, () => { - done() - }) - } - }, - ) - - step( - `setBucketVersioning(bucketName, versionConfig):_bucketName:${versionedBucketName},versionConfig:{Status:"Suspended"}`, - (done) => { - client.setBucketVersioning(versionedBucketName, { Status: 'Suspended' }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - }, - ) - }) - }) - - describe('Versioning Supported listObjects', function () { - const versionedBucketName = 'minio-js-test-version-list' + uuid.v4() - const prefixName = 'Prefix1' - const versionedObjectName = 'datafile-100-kB' - const objVersionIdCounter = [1, 2, 3, 4, 5] // This should track adding 5 versions of the same object. - let listObjectsNum = objVersionIdCounter.length - let objArray = [] - let listPrefixArray = [] - let isVersioningSupported = false - - const objNameWithPrefix = `${prefixName}/${versionedObjectName}` - - before((done) => - client.makeBucket(versionedBucketName, '', () => { - client.setBucketVersioning(versionedBucketName, { Status: 'Enabled' }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - isVersioningSupported = true - done() - }) - }), - ) - after((done) => client.removeBucket(versionedBucketName, done)) - - step( - `putObject(bucketName, objectName, stream, size, metaData, callback)_bucketName:${versionedBucketName}, stream:1b, size:1_Create ${listObjectsNum} objects`, - (done) => { - if (isVersioningSupported) { - let count = 1 - objVersionIdCounter.forEach(() => { - client.putObject( - versionedBucketName, - objNameWithPrefix, - readableStream(_1byte), - _1byte.length, - {}, - (e, data) => { - objArray.push(data) - if (count === objVersionIdCounter.length) { - done() - } - count += 1 - }, - ) - }) - } else { - done() - } - }, - ) - - step( - `listObjects(bucketName, prefix, 
recursive)_bucketName:${versionedBucketName}, prefix: '', recursive:true_`, - (done) => { - if (isVersioningSupported) { - client - .listObjects(versionedBucketName, '', true, { IncludeVersion: true }) - .on('error', done) - .on('end', () => { - if (_.isEqual(objArray.length, listPrefixArray.length)) { - return done() - } - return done(new Error(`listObjects lists ${listPrefixArray.length} objects, expected ${listObjectsNum}`)) - }) - .on('data', (data) => { - listPrefixArray.push(data) - }) - } else { - done() - } - }, - ) - - step( - `listObjects(bucketName, prefix, recursive)_bucketName:${versionedBucketName}, prefix: ${prefixName}, recursive:true_`, - (done) => { - if (isVersioningSupported) { - listPrefixArray = [] - client - .listObjects(versionedBucketName, prefixName, true, { IncludeVersion: true }) - .on('error', done) - .on('end', () => { - if (_.isEqual(objArray.length, listPrefixArray.length)) { - return done() - } - return done(new Error(`listObjects lists ${listPrefixArray.length} objects, expected ${listObjectsNum}`)) - }) - .on('data', (data) => { - listPrefixArray.push(data) - }) - } else { - done() - } - }, - ) - - step( - `removeObject(bucketName, objectName, removeOpts)_bucketName:${versionedBucketName}_Remove ${listObjectsNum} objects`, - (done) => { - if (isVersioningSupported) { - let count = 1 - listPrefixArray.forEach((item) => { - client.removeObject(versionedBucketName, item.name, { versionId: item.versionId }, () => { - if (count === listPrefixArray.length) { - done() - } - count += 1 - }) - }) - } else { - done() - } - }, - ) - }) - - describe('Versioning tests on a bucket for Deletion of Multiple versions', function () { - // Isolate the bucket/object for easy debugging and tracking. - const versionedBucketName = 'minio-js-test-version-' + uuid.v4() - const versioned_100kbObjectName = 'datafile-100-kB' - const versioned_100kb_Object = dataDir - ? 
fs.readFileSync(dataDir + '/' + versioned_100kbObjectName) - : Buffer.alloc(100 * 1024, 0) - - before((done) => client.makeBucket(versionedBucketName, '', done)) - after((done) => client.removeBucket(versionedBucketName, done)) - - describe('Test for removal of multiple versions', function () { - let isVersioningSupported = false - const objVersionList = [] - step( - `setBucketVersioning(bucketName, versionConfig):_bucketName:${versionedBucketName},versionConfig:{Status:"Enabled"} `, - (done) => { - client.setBucketVersioning(versionedBucketName, { Status: 'Enabled' }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - isVersioningSupported = true - done() - }) - }, - ) - - step( - `putObject(bucketName, objectName, stream)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}, stream:100Kib_`, - (done) => { - if (isVersioningSupported) { - client - .putObject(versionedBucketName, versioned_100kbObjectName, versioned_100kb_Object) - .then(() => done()) - .catch(done) - } else { - done() - } - }, - ) - // Put two versions of the same object. - step( - `putObject(bucketName, objectName, stream)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}, stream:100Kib_`, - (done) => { - // Put two versions of the same object. 
- if (isVersioningSupported) { - client - .putObject(versionedBucketName, versioned_100kbObjectName, versioned_100kb_Object) - .then(() => done()) - .catch(done) - } else { - done() - } - }, - ) - - step( - `listObjects(bucketName, prefix, recursive)_bucketName:${versionedBucketName}, prefix: '', recursive:true_`, - (done) => { - if (isVersioningSupported) { - client - .listObjects(versionedBucketName, '', true, { IncludeVersion: true }) - .on('error', done) - .on('end', () => { - if (_.isEqual(2, objVersionList.length)) { - return done() - } - return done(new Error(`listObjects lists ${objVersionList.length} objects, expected ${2}`)) - }) - .on('data', (data) => { - // Pass list object response as is to remove objects - objVersionList.push(data) - }) - } else { - done() - } - }, - ) - - step( - `removeObjects(bucketName, objectList, removeOpts)_bucketName:${versionedBucketName}_Remove ${objVersionList.length} objects`, - (done) => { - if (isVersioningSupported) { - let count = 1 - objVersionList.forEach(() => { - // remove multiple versions of the object. - client.removeObjects(versionedBucketName, objVersionList, () => { - if (count === objVersionList.length) { - done() - } - count += 1 - }) - }) - } else { - done() - } - }, - ) - }) - }) - - describe('Bucket Tags API', () => { - // Isolate the bucket/object for easy debugging and tracking. 
- const tagsBucketName = 'minio-js-test-tags-' + uuid.v4() - before((done) => client.makeBucket(tagsBucketName, '', done)) - after((done) => client.removeBucket(tagsBucketName, done)) - - describe('set, get and remove Tags on a bucket', function () { - step(`Set tags on a bucket_bucketName:${tagsBucketName}`, (done) => { - client.setBucketTagging(tagsBucketName, { 'test-tag-key': 'test-tag-value' }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - }) - step(`Get tags on a bucket_bucketName:${tagsBucketName}`, (done) => { - client.getBucketTagging(tagsBucketName, (err, tagList) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - if (isArray(tagList)) { - done() - } - }) - }) - - step(`remove Tags on a bucket_bucketName:${tagsBucketName}`, (done) => { - client.removeBucketTagging(tagsBucketName, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - }) - }) - }) - - describe('Object Tags API', () => { - // Isolate the bucket/object for easy debugging and tracking. 
- const tagsBucketName = 'minio-js-test-tags-' + uuid.v4() - before((done) => client.makeBucket(tagsBucketName, '', done)) - after((done) => client.removeBucket(tagsBucketName, done)) - - const tagObjName = 'datafile-100-kB' - const tagObject = Buffer.alloc(100 * 1024, 0) - - describe('set, get and remove Tags on an object', function () { - step( - `putObject(bucketName, objectName, stream)_bucketName:${tagsBucketName}, objectName:${tagObjName}, stream:100Kib_`, - (done) => { - client - .putObject(tagsBucketName, tagObjName, tagObject) - .then(() => done()) - .catch(done) - }, - ) - - step(`putObjectTagging object_bucketName:${tagsBucketName}, objectName:${tagObjName},`, (done) => { - client.setObjectTagging(tagsBucketName, tagObjName, { 'test-tag-key-obj': 'test-tag-value-obj' }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - }) - - step(`getObjectTagging object_bucketName:${tagsBucketName}, objectName:${tagObjName},`, (done) => { - client.getObjectTagging(tagsBucketName, tagObjName, (err, tagList) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - if (isArray(tagList)) { - done() - } - }) - }) - - step(`removeObjectTagging on an object_bucketName:${tagsBucketName}, objectName:${tagObjName},`, (done) => { - client.removeObjectTagging(tagsBucketName, tagObjName, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - }) - step(`removeObject object_bucketName:${tagsBucketName}, objectName:${tagObjName},`, (done) => { - client.removeObject(tagsBucketName, tagObjName, () => { - done() - }) - }) - }) - }) - - describe('Object Tags API with Versioning support', () => { - // Isolate the bucket/object for easy debugging and tracking. 
- const tagsVersionedBucketName = 'minio-js-test-tags-version-' + uuid.v4() - before((done) => client.makeBucket(tagsVersionedBucketName, '', done)) - after((done) => client.removeBucket(tagsVersionedBucketName, done)) - - const tagObjName = 'datafile-100-kB' - const tagObject = Buffer.alloc(100 * 1024, 0) - let isVersioningSupported = false - let versionId = null - - describe('set, get and remove Tags on a versioned object', function () { - step( - `Enable Versioning on Bucket: setBucketVersioning(bucketName,versioningConfig)_bucketName:${tagsVersionedBucketName},{Status:"Enabled"}`, - (done) => { - client.setBucketVersioning(tagsVersionedBucketName, { Status: 'Enabled' }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - isVersioningSupported = true - done() - }) - }, - ) - - step( - `putObject(bucketName, objectName, stream)_bucketName:${tagsVersionedBucketName}, objectName:${tagObjName}, stream:100Kib_`, - (done) => { - if (isVersioningSupported) { - client - .putObject(tagsVersionedBucketName, tagObjName, tagObject) - .then((res = {}) => { - if (res.versionId) { - versionId = res.versionId // In gateway mode versionId will not be returned. 
- } - done() - }) - .catch(done) - } else { - done() - } - }, - ) - - step(`Set tags on an object_bucketName:${tagsVersionedBucketName}, objectName:${tagObjName},`, (done) => { - if (isVersioningSupported) { - client.setObjectTagging( - tagsVersionedBucketName, - tagObjName, - { 'test-tag-key-obj': 'test-tag-value-obj' }, - { versionId: versionId }, - (err) => { - if (err) { - return done(err) - } - done() - }, - ) - } else { - done() - } - }) - - step(`Get tags on an object_bucketName:${tagsVersionedBucketName}, objectName:${tagObjName},`, (done) => { - if (isVersioningSupported) { - client.getObjectTagging(tagsVersionedBucketName, tagObjName, { versionId: versionId }, (err, tagList) => { - if (err) { - return done(err) - } - if (isArray(tagList)) { - done() - } - }) - } else { - done() - } - }) - - step(`remove Tags on an object_bucketName:${tagsVersionedBucketName}, objectName:${tagObjName},`, (done) => { - if (isVersioningSupported) { - client.removeObjectTagging(tagsVersionedBucketName, tagObjName, { versionId: versionId }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - } else { - done() - } - }) - step(`remove Tags on an object_bucketName:${tagsVersionedBucketName}, objectName:${tagObjName},`, (done) => { - if (isVersioningSupported) { - client.removeObject(tagsVersionedBucketName, tagObjName, { versionId: versionId }, () => { - done() - }) - } else { - done() - } - }) - }) - }) - - describe('Bucket Lifecycle API', () => { - const bucketName = 'minio-js-test-lifecycle-' + uuid.v4() - before((done) => client.makeBucket(bucketName, '', done)) - after((done) => client.removeBucket(bucketName, done)) - - describe('Set, Get Lifecycle config Tests', function () { - step(`Set lifecycle config on a bucket:_bucketName:${bucketName}`, (done) => { - const lifecycleConfig = { - Rule: [ - { - ID: 'Transition and Expiration Rule', - Status: 'Enabled', - Filter: { - Prefix: '', - }, - 
Expiration: { - Days: '3650', - }, - }, - ], - } - client.setBucketLifecycle(bucketName, lifecycleConfig, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - }) - - step('Set lifecycle config of a bucket', (done) => { - client.getBucketLifecycle(bucketName, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - }) - - step('Remove lifecycle config of a bucket', (done) => { - client.removeBucketLifecycle(bucketName, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - }) - }) - }) - - describe('Versioning Supported preSignedUrl Get, Put Tests', function () { - /** - * Test Steps - * 1. Create Versioned Bucket - * 2. presignedPutObject of 2 Versions of different size - * 3. List and ensure that there are two versions - * 4. presignedGetObject with versionId to ensure that we are able to get - * 5. Remove all object versions at once - * 6. Cleanup bucket. 
- */ - - const versionedBucketName = 'minio-js-test-ver-presign-' + uuid.v4() - const versionedPresignObjName = 'datafile-1-b' - const _100_byte = Buffer.alloc(100 * 1024, 0) - const _200_byte = Buffer.alloc(200 * 1024, 0) - let isVersioningSupported = false - const objectsList = [] - const expectedVersionsCount = 2 - - before((done) => - client.makeBucket(versionedBucketName, '', () => { - client.setBucketVersioning(versionedBucketName, { Status: 'Enabled' }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - isVersioningSupported = true - done() - }) - }), - ) - after((done) => client.removeBucket(versionedBucketName, done)) - - step( - `presignedPutObject(bucketName, objectName, expires=1000, cb)_bucketName:${versionedBucketName} ${versionedPresignObjName} _version:1`, - (done) => { - if (isVersioningSupported) { - client.presignedPutObject(versionedBucketName, versionedPresignObjName, 1000, (e, presignedUrl) => { - if (e) { - done(e) - } - let mobileClientReqWithProtocol = http - var upldRequestOptions = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) - upldRequestOptions.method = 'PUT' - upldRequestOptions.headers = { - 'content-length': _100_byte.length, - } - if (upldRequestOptions.protocol === 'https:') { - mobileClientReqWithProtocol = https - } - const uploadRequest = mobileClientReqWithProtocol.request(upldRequestOptions, (response) => { - if (response.statusCode !== 200) { - return new Error(`error on put : ${response.statusCode}`) - } - response.on('error', (err) => { - done(err) - }) - response.on('end', () => { - done() - }) - response.on('data', () => { - // just drain - }) - }) - - uploadRequest.on('error', (er) => { - done(er) - }) - - uploadRequest.write(_100_byte) - uploadRequest.end() - }) - } else { - done() - } - }, - ) - - step( - `presignedPutObject(bucketName, objectName, expires=1000, cb)_bucketName:${versionedBucketName} ${versionedPresignObjName} 
_version:2`, - (done) => { - if (isVersioningSupported) { - client.presignedPutObject(versionedBucketName, versionedPresignObjName, 1000, (e, presignedUrl) => { - if (e) { - done(e) - } - let mobileClientReqWithProtocol = http - var upldRequestOptions = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) - upldRequestOptions.method = 'PUT' - upldRequestOptions.headers = { - 'content-length': _200_byte.length, - } - if (upldRequestOptions.protocol === 'https:') { - mobileClientReqWithProtocol = https - } - const uploadRequest = mobileClientReqWithProtocol.request(upldRequestOptions, (response) => { - if (response.statusCode !== 200) { - return new Error(`error on put : ${response.statusCode}`) - } - response.on('error', (err) => { - done(err) - }) - response.on('end', () => { - done() - }) - response.on('data', () => { - // just drain - }) - }) - - uploadRequest.on('error', (er) => { - done(er) - }) - - uploadRequest.write(_200_byte) - uploadRequest.end() - }) - } else { - done() - } - }, - ) - - step( - `listObjects(bucketName, '', true, {IncludeVersion: true}, cb)_bucketName:${versionedBucketName} _prefix:""`, - (done) => { - if (isVersioningSupported) { - const objectsStream = client.listObjects(versionedBucketName, '', true, { IncludeVersion: true }) - objectsStream.on('data', function (obj) { - objectsList.push({ versionId: obj.versionId, name: obj.name }) - }) - - objectsStream.on('error', function () { - return done() - }) - objectsStream.on('end', function () { - const objectListCount = objectsList.length - if (objectListCount === expectedVersionsCount) { - done() - } else { - return done( - new Error(`Version count does not match for versioned presigned url test. 
${expectedVersionsCount}`), - ) - } - }) - } else { - done() - } - }, - ) - - step( - `presignedGetObject(bucketName, objectName, 1000, respHeaders, requestDate, cb)_bucketName:${versionedBucketName} _objectName:${versionedPresignObjName} _version:(2/2)`, - (done) => { - if (isVersioningSupported) { - client.presignedGetObject( - versionedBucketName, - objectsList[1].name, - 1000, - { versionId: objectsList[1].versionId }, - new Date(), - (e, presignedUrl) => { - if (e) { - return done() - } - let mobileClientReqWithProtocol = http - const getReqOpts = _.pick(url.parse(presignedUrl), ['hostname', 'port', 'path', 'protocol']) - getReqOpts.method = 'GET' - const _100kbmd5 = crypto.createHash('md5').update(_100_byte).digest('hex') - - const hash = crypto.createHash('md5') - if (getReqOpts.protocol === 'https:') { - mobileClientReqWithProtocol = https - } - const request = mobileClientReqWithProtocol.request(getReqOpts, (response) => { - // if delete marker. method not allowed. - if (response.statusCode !== 200) { - return new Error(`error on get : ${response.statusCode}`) - } - response.on('error', () => { - return done() - }) - response.on('end', () => { - const hashValue = hash.digest('hex') - if (hashValue === _100kbmd5) { - done() - } else { - return done(new Error('Unable to retrieve version of an object using presignedGetObject')) - } - }) - response.on('data', (data) => { - hash.update(data) - }) - }) - request.on('error', () => { - return done() - }) - request.end() - }, - ) - } else { - done() - } - }, - ) - - step(`removeObjects(bucketName, objectsList)_bucketName:${versionedBucketName}`, (done) => { - if (isVersioningSupported) { - client.removeObjects(versionedBucketName, objectsList, function (e) { - if (e) { - done(e) - } - done() - }) - } else { - done() - } - }) - }) - - describe('Object Lock API Bucket Options Test', () => { - // Isolate the bucket/object for easy debugging and tracking. - // Gateway mode does not support this header. 
- - describe('Object Lock support makeBucket API Tests', function () { - const lockEnabledBucketName = 'minio-js-test-lock-mb-' + uuid.v4() - let isFeatureSupported = false - step(`Check if bucket with object lock can be created:_bucketName:${lockEnabledBucketName}`, (done) => { - client.makeBucket(lockEnabledBucketName, { ObjectLocking: true }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - isFeatureSupported = true - if (err) { - return done(err) - } - done() - }) - }) - - step(`Get lock config on a bucket:_bucketName:${lockEnabledBucketName}`, (done) => { - if (isFeatureSupported) { - client.getObjectLockConfig(lockEnabledBucketName, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - } else { - done() - } - }) - - step(`Check if bucket can be deleted:_bucketName:${lockEnabledBucketName}`, (done) => { - client.removeBucket(lockEnabledBucketName, (err) => { - if (isFeatureSupported) { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - } else { - done() - } - }) - }) - }) - - describe('Object Lock support Set/Get API Tests', function () { - const lockConfigBucketName = 'minio-js-test-lock-conf-' + uuid.v4() - let isFeatureSupported = false - step(`Check if bucket with object lock can be created:_bucketName:${lockConfigBucketName}`, (done) => { - client.makeBucket(lockConfigBucketName, { ObjectLocking: true }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - isFeatureSupported = true - if (err) { - return done(err) - } - done() - }) - }) - step(`Update or replace lock config on a bucket:_bucketName:${lockConfigBucketName}`, (done) => { - if (isFeatureSupported) { - client.setObjectLockConfig( - lockConfigBucketName, - { mode: 'GOVERNANCE', unit: 'Years', validity: 2 }, - (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return 
done(err) - } - done() - }, - ) - } else { - done() - } - }) - step(`Get lock config on a bucket:_bucketName:${lockConfigBucketName}`, (done) => { - if (isFeatureSupported) { - client.getObjectLockConfig(lockConfigBucketName, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - } else { - done() - } - }) - - step(`Set lock config on a bucket:_bucketName:${lockConfigBucketName}`, (done) => { - if (isFeatureSupported) { - client.setObjectLockConfig(lockConfigBucketName, {}, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - } else { - done() - } - }) - step(`Get and verify lock config on a bucket after reset/update:_bucketName:${lockConfigBucketName}`, (done) => { - if (isFeatureSupported) { - client.getObjectLockConfig(lockConfigBucketName, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - }) - } else { - done() - } - }) - - step(`Check if bucket can be deleted:_bucketName:${lockConfigBucketName}`, (done) => { - client.removeBucket(lockConfigBucketName, (err) => { - if (isFeatureSupported) { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - done() - } else { - done() - } - }) - }) - }) - }) - - describe('Object retention API Tests', () => { - // Isolate the bucket/object for easy debugging and tracking. - // Gateway mode does not support this header. 
- - describe('Object retention get/set API Test', function () { - const objRetentionBucket = 'minio-js-test-retention-' + uuid.v4() - const retentionObjName = 'RetentionObject' - let isFeatureSupported = false - let versionId = null - - step(`Check if bucket with object lock can be created:_bucketName:${objRetentionBucket}`, (done) => { - client.makeBucket(objRetentionBucket, { ObjectLocking: true }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - isFeatureSupported = true - if (err) { - return done(err) - } - done() - }) - }) - - step( - `putObject(bucketName, objectName, stream)_bucketName:${objRetentionBucket}, objectName:${retentionObjName}, stream:100Kib_`, - (done) => { - // Put two versions of the same object. - if (isFeatureSupported) { - client - .putObject(objRetentionBucket, retentionObjName, readableStream(_1byte), _1byte.length, {}) - .then(() => done()) - .catch(done) - } else { - done() - } - }, - ) - - step( - `statObject(bucketName, objectName, statOpts)_bucketName:${objRetentionBucket}, objectName:${retentionObjName}`, - (done) => { - if (isFeatureSupported) { - client.statObject(objRetentionBucket, retentionObjName, {}, (e, res) => { - versionId = res.versionId - done() - }) - } else { - done() - } - }, - ) - - step( - `putObjectRetention(bucketName, objectName, putOpts)_bucketName:${objRetentionBucket}, objectName:${retentionObjName}`, - (done) => { - // Put two versions of the same object. - if (isFeatureSupported) { - let expirationDate = new Date() - // set expiry to start of next day. 
- expirationDate.setDate(expirationDate.getDate() + 1) - expirationDate.setUTCHours(0, 0, 0, 0) // Should be start of the day.(midnight) - - client - .putObjectRetention(objRetentionBucket, retentionObjName, { - governanceBypass: true, - mode: 'GOVERNANCE', - retainUntilDate: expirationDate.toISOString(), - versionId: versionId, - }) - .then(() => done()) - .catch(done) - } else { - done() - } - }, - ) - - step( - `getObjectRetention(bucketName, objectName, getOpts)_bucketName:${objRetentionBucket}, objectName:${retentionObjName}`, - (done) => { - if (isFeatureSupported) { - client.getObjectRetention(objRetentionBucket, retentionObjName, { versionId: versionId }, () => { - done() - }) - } else { - done() - } - }, - ) - - step( - `removeObject(bucketName, objectName, removeOpts)_bucketName:${objRetentionBucket}, objectName:${retentionObjName}`, - (done) => { - if (isFeatureSupported) { - client.removeObject( - objRetentionBucket, - retentionObjName, - { versionId: versionId, governanceBypass: true }, - () => { - done() - }, - ) - } else { - done() - } - }, - ) - - step(`removeBucket(bucketName, )_bucketName:${objRetentionBucket}`, (done) => { - if (isFeatureSupported) { - client.removeBucket(objRetentionBucket, () => { - done() - }) - } else { - done() - } - }) - }) - }) - - describe('Bucket Encryption Related APIs', () => { - // Isolate the bucket/object for easy debugging and tracking. - // this is not supported in gateway mode. - const encBucketName = 'minio-js-test-bucket-enc-' + uuid.v4() - before((done) => client.makeBucket(encBucketName, '', done)) - after((done) => client.removeBucket(encBucketName, done)) - - const encObjName = 'datafile-100-kB' - const encObjFileContent = Buffer.alloc(100 * 1024, 0) - let isEncryptionSupported = false - - step(`Set Encryption on a bucket:_bucketName:${encBucketName}`, (done) => { - // setBucketEncryption succeeds in NAS mode. 
- const buckEncPromise = client.setBucketEncryption(encBucketName) - buckEncPromise - .then(() => { - done() - }) - .catch(() => { - done() - }) - }) - - step(`Get encryption of a bucket:_bucketName:${encBucketName}`, (done) => { - const getBucEncObj = client.getBucketEncryption(encBucketName) - getBucEncObj - .then(() => { - done() - }) - .catch((err) => { - if (err && err.code === 'NotImplemented') { - isEncryptionSupported = false - return done() - } - if (err && err.code === 'ServerSideEncryptionConfigurationNotFoundError') { - return done() - } - if (err) { - return done(err) - } - done() - }) - }) - - step( - `Put an object to check for default encryption bucket:_bucketName:${encBucketName}, _objectName:${encObjName}`, - (done) => { - if (isEncryptionSupported) { - const putObjPromise = client.putObject(encBucketName, encObjName, encObjFileContent) - putObjPromise - .then(() => { - done() - }) - .catch(() => { - done() - }) - } else { - done() - } - }, - ) - - step( - `Stat of an object to check for default encryption applied on a bucket:_bucketName:${encBucketName}, _objectName:${encObjName}`, - (done) => { - if (isEncryptionSupported) { - const statObjPromise = client.statObject(encBucketName, encObjName) - statObjPromise - .then(() => { - done() - }) - .catch(() => { - done() - }) - } else { - done() - } - }, - ) - - step( - `Stat of an object to check for default encryption applied on a bucket:_bucketName:${encBucketName}`, - (done) => { - if (isEncryptionSupported) { - const getBuckEnc = client.getBucketEncryption(encBucketName) - getBuckEnc - .then(() => { - done() - }) - .catch(() => { - done() - }) - } else { - done() - } - }, - ) - - step(`Remove object on a bucket:_bucketName:${encBucketName}, _objectName:${encObjName}`, (done) => { - if (isEncryptionSupported) { - const removeObj = client.removeObject(encBucketName, encObjName) - removeObj - .then(() => { - done() - }) - .catch(() => { - done() - }) - } else { - done() - } - }) - - step(`Remove 
encryption on a bucket:_bucketName:${encBucketName}`, (done) => { - if (isEncryptionSupported) { - const removeObj = client.removeBucketEncryption(encBucketName) - removeObj - .then(() => { - done() - }) - .catch(() => { - done() - }) - } else { - done() - } - }) - step(`Get encryption on a bucket:_bucketName:${encBucketName}`, (done) => { - if (isEncryptionSupported) { - const getBuckEnc = client.getBucketEncryption(encBucketName) - getBuckEnc - .then(() => { - done() - }) - .catch(() => { - done() - }) - } else { - done() - } - }) - }) - - describe('Bucket Replication API Tests', () => { - // TODO - As of now, there is no api to get arn programmatically to setup replication through APIs and verify. - // Please refer to minio server documentation and mc cli. - // https://min.io/docs/minio/linux/administration/bucket-replication.html - // https://min.io/docs/minio/linux/reference/minio-mc/mc-replicate-add.html - }) - - describe('Object Legal hold API Tests', () => { - // Isolate the bucket/object for easy debugging and tracking. - // Gateway mode does not support this header. 
- let versionId = null - describe('Object Legal hold get/set API Test', function () { - const objLegalHoldBucketName = 'minio-js-test-legalhold-' + uuid.v4() - const objLegalHoldObjName = 'LegalHoldObject' - let isFeatureSupported = false - - step(`Check if bucket with object lock can be created:_bucketName:${objLegalHoldBucketName}`, (done) => { - client.makeBucket(objLegalHoldBucketName, { ObjectLocking: true }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - isFeatureSupported = true - if (err) { - return done(err) - } - done() - }) - }) - - step( - `putObject(bucketName, objectName, stream)_bucketName:${objLegalHoldBucketName}, objectName:${objLegalHoldObjName}, stream:100Kib_`, - (done) => { - if (isFeatureSupported) { - client - .putObject(objLegalHoldBucketName, objLegalHoldObjName, readableStream(_1byte), _1byte.length, {}) - .then(() => done()) - .catch(done) - } else { - done() - } - }, - ) - - step( - `statObject(bucketName, objectName, statOpts)_bucketName:${objLegalHoldBucketName}, objectName:${objLegalHoldObjName}`, - (done) => { - if (isFeatureSupported) { - client.statObject(objLegalHoldBucketName, objLegalHoldObjName, {}, (e, res) => { - versionId = res.versionId - done() - }) - } else { - done() - } - }, - ) - - step( - `setObjectLegalHold(bucketName, objectName, setOpts={})_bucketName:${objLegalHoldBucketName}, objectName:${objLegalHoldObjName}`, - (done) => { - if (isFeatureSupported) { - client.setObjectLegalHold(objLegalHoldBucketName, objLegalHoldObjName, () => { - done() - }) - } else { - done() - } - }, - ) - - step( - `setObjectLegalHold(bucketName, objectName, setOpts={})_bucketName:${objLegalHoldBucketName}, objectName:${objLegalHoldObjName}`, - (done) => { - if (isFeatureSupported) { - client.setObjectLegalHold( - objLegalHoldBucketName, - objLegalHoldObjName, - { status: 'ON', versionId: versionId }, - () => { - done() - }, - ) - } else { - done() - } - }, - ) - - step( - `getObjectLegalHold(bucketName, 
objectName, setOpts={})_bucketName:${objLegalHoldBucketName}, objectName:${objLegalHoldObjName}`, - (done) => { - if (isFeatureSupported) { - client.getObjectLegalHold(objLegalHoldBucketName, objLegalHoldObjName, () => { - done() - }) - } else { - done() - } - }, - ) - - step( - `setObjectLegalHold(bucketName, objectName, setOpts={})_bucketName:${objLegalHoldBucketName}, objectName:${objLegalHoldObjName}`, - (done) => { - if (isFeatureSupported) { - client.setObjectLegalHold( - objLegalHoldBucketName, - objLegalHoldObjName, - { status: 'OFF', versionId: versionId }, - () => { - done() - }, - ) - } else { - done() - } - }, - ) - - step( - `getObjectLegalHold(bucketName, objectName, setOpts={})_bucketName:${objLegalHoldBucketName}, objectName:${objLegalHoldObjName}`, - (done) => { - if (isFeatureSupported) { - client.getObjectLegalHold(objLegalHoldBucketName, objLegalHoldObjName, { versionId: versionId }, () => { - done() - }) - } else { - done() - } - }, - ) - - step( - `removeObject(bucketName, objectName, removeOpts)_bucketName:${objLegalHoldBucketName}, objectName:${objLegalHoldObjName}`, - (done) => { - if (isFeatureSupported) { - client.removeObject( - objLegalHoldBucketName, - objLegalHoldObjName, - { versionId: versionId, governanceBypass: true }, - () => { - done() - }, - ) - } else { - done() - } - }, - ) - - step(`removeBucket(bucketName, )_bucketName:${objLegalHoldBucketName}`, (done) => { - if (isFeatureSupported) { - client.removeBucket(objLegalHoldBucketName, () => { - done() - }) - } else { - done() - } - }) - }) - }) - - describe('Object Name special characters test without Prefix', () => { - // Isolate the bucket/object for easy debugging and tracking. 
- const bucketNameForSpCharObjects = 'minio-js-test-obj-spwpre-' + uuid.v4() - before((done) => client.makeBucket(bucketNameForSpCharObjects, '', done)) - after((done) => client.removeBucket(bucketNameForSpCharObjects, done)) - - // Reference:: https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html - // Host OS compatible File name characters/ file names. - - let objectNameSpecialChars = "äöüex ®©µÄÆÐÕæŒƕƩDž 01000000 0x40 \u0040 amȡȹɆple&0a!-_.*'()&$@=;:+,?<>.pdf" - if (isWindowsPlatform) { - objectNameSpecialChars = "äöüex ®©µÄÆÐÕæŒƕƩDž 01000000 0x40 u0040 amȡȹɆple&0a!-_.'()&$@=;+,.pdf" - } - - const objectContents = Buffer.alloc(100 * 1024, 0) - - describe('Without Prefix Test', function () { - step( - `putObject(bucketName, objectName, stream)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameSpecialChars}, stream:100Kib_`, - (done) => { - client - .putObject(bucketNameForSpCharObjects, objectNameSpecialChars, objectContents) - .then(() => { - done() - }) - .catch(done) - }, - ) - - step( - `listObjects(bucketName, prefix, recursive)_bucketName:${bucketNameForSpCharObjects}, prefix:"", true`, - (done) => { - const listStream = client.listObjects(bucketNameForSpCharObjects, '', true) - let listedObject = null - listStream.on('data', function (obj) { - listedObject = obj - }) - listStream.on('end', () => { - if (listedObject.name === objectNameSpecialChars) { - done() - } else { - return done(new Error(`Expected object Name: ${objectNameSpecialChars}: received:${listedObject.name}`)) - } - }) - listStream.on('error', function (e) { - done(e) - }) - }, - ) - - step( - `listObjectsV2(bucketName, prefix, recursive)_bucketName:${bucketNameForSpCharObjects}, prefix:"", true`, - (done) => { - const listStream = client.listObjectsV2(bucketNameForSpCharObjects, '', true) - let listedObject = null - listStream.on('data', function (obj) { - listedObject = obj - }) - listStream.on('end', () => { - if (listedObject.name === 
objectNameSpecialChars) { - done() - } else { - return done(new Error(`Expected object Name: ${objectNameSpecialChars}: received:${listedObject.name}`)) - } - }) - - listStream.on('error', function (e) { - done(e) - }) - }, - ) - step( - `extensions.listObjectsV2WithMetadata(bucketName, prefix, recursive)_bucketName:${bucketNameForSpCharObjects}, prefix:"", true`, - (done) => { - const listStream = client.extensions.listObjectsV2WithMetadata(bucketNameForSpCharObjects, '', true) - let listedObject = null - listStream.on('data', function (obj) { - listedObject = obj - }) - listStream.on('end', () => { - if (listedObject.name === objectNameSpecialChars) { - done() - } else { - return done(new Error(`Expected object Name: ${objectNameSpecialChars}: received:${listedObject.name}`)) - } - }) - - listStream.on('error', function (e) { - done(e) - }) - }, - ) - - step( - `getObject(bucketName, objectName)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameSpecialChars}`, - (done) => { - client - .getObject(bucketNameForSpCharObjects, objectNameSpecialChars) - .then((stream) => { - stream.on('data', function () {}) - stream.on('end', done) - }) - .catch(done) - }, - ) - - step( - `statObject(bucketName, objectName, cb)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameSpecialChars}`, - (done) => { - client.statObject(bucketNameForSpCharObjects, objectNameSpecialChars, (e) => { - if (e) { - return done(e) - } - done() - }) - }, - ) - - step( - `removeObject(bucketName, objectName)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameSpecialChars}`, - (done) => { - client - .removeObject(bucketNameForSpCharObjects, objectNameSpecialChars) - .then(() => done()) - .catch(done) - }, - ) - }) - }) - describe('Object Name special characters test with a Prefix', () => { - // Isolate the bucket/object for easy debugging and tracking. 
- const bucketNameForSpCharObjects = 'minio-js-test-obj-spnpre-' + uuid.v4() - before((done) => client.makeBucket(bucketNameForSpCharObjects, '', done)) - after((done) => client.removeBucket(bucketNameForSpCharObjects, done)) - - // Reference:: https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html - let objectNameSpecialChars = "äöüex ®©µÄÆÐÕæŒƕƩDž 01000000 0x40 \u0040 amȡȹɆple&0a!-_.*'()&$@=;:+,?<>.pdf" - if (isWindowsPlatform) { - objectNameSpecialChars = "äöüex ®©µÄÆÐÕæŒƕƩDž 01000000 0x40 u0040 amȡȹɆple&0a!-_.'()&$@=;+,.pdf" - } - const prefix = 'test' - const objectNameWithPrefixForSpecialChars = `${prefix}/${objectNameSpecialChars}` - - const objectContents = Buffer.alloc(100 * 1024, 0) - - describe('With Prefix Test', function () { - step( - `putObject(bucketName, objectName, stream)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameWithPrefixForSpecialChars}, stream:100Kib`, - (done) => { - client - .putObject(bucketNameForSpCharObjects, objectNameWithPrefixForSpecialChars, objectContents) - .then(() => { - done() - }) - .catch(done) - }, - ) - - step( - `listObjects(bucketName, prefix, recursive)_bucketName:${bucketNameForSpCharObjects}, prefix:${prefix}, recursive:true`, - (done) => { - const listStream = client.listObjects(bucketNameForSpCharObjects, prefix, true) - let listedObject = null - listStream.on('data', function (obj) { - listedObject = obj - }) - listStream.on('end', () => { - if (listedObject.name === objectNameWithPrefixForSpecialChars) { - done() - } else { - return done( - new Error( - `Expected object Name: ${objectNameWithPrefixForSpecialChars}: received:${listedObject.name}`, - ), - ) - } - }) - listStream.on('error', function (e) { - done(e) - }) - }, - ) - - step( - `listObjectsV2(bucketName, prefix, recursive)_bucketName:${bucketNameForSpCharObjects}, prefix:${prefix}, recursive:true`, - (done) => { - const listStream = client.listObjectsV2(bucketNameForSpCharObjects, prefix, true) - let listedObject = 
null - listStream.on('data', function (obj) { - listedObject = obj - }) - listStream.on('end', () => { - if (listedObject.name === objectNameWithPrefixForSpecialChars) { - done() - } else { - return done( - new Error( - `Expected object Name: ${objectNameWithPrefixForSpecialChars}: received:${listedObject.name}`, - ), - ) - } - }) - listStream.on('error', function (e) { - done(e) - }) - }, - ) - - step( - `extensions.listObjectsV2WithMetadata(bucketName, prefix, recursive)_bucketName:${bucketNameForSpCharObjects}, prefix:${prefix}, recursive:true`, - (done) => { - const listStream = client.extensions.listObjectsV2WithMetadata(bucketNameForSpCharObjects, prefix, true) - let listedObject = null - listStream.on('data', function (obj) { - listedObject = obj - }) - listStream.on('end', () => { - if (listedObject.name === objectNameWithPrefixForSpecialChars) { - done() - } else { - return done( - new Error( - `Expected object Name: ${objectNameWithPrefixForSpecialChars}: received:${listedObject.name}`, - ), - ) - } - }) - listStream.on('error', function (e) { - done(e) - }) - }, - ) - - step( - `getObject(bucketName, objectName)_bucketName:${bucketNameForSpCharObjects}, _objectName_:${objectNameWithPrefixForSpecialChars}`, - (done) => { - client - .getObject(bucketNameForSpCharObjects, objectNameWithPrefixForSpecialChars) - .then((stream) => { - stream.on('data', function () {}) - stream.on('end', done) - }) - .catch(done) - }, - ) - - step( - `statObject(bucketName, objectName, cb)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameWithPrefixForSpecialChars}`, - (done) => { - client.statObject(bucketNameForSpCharObjects, objectNameWithPrefixForSpecialChars, (e) => { - if (e) { - return done(e) - } - done() - }) - }, - ) - - step( - `removeObject(bucketName, objectName)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameWithPrefixForSpecialChars}`, - (done) => { - client - .removeObject(bucketNameForSpCharObjects, 
objectNameWithPrefixForSpecialChars) - .then(() => done()) - .catch(done) - }, - ) - }) - }) - - describe('Assume Role Tests', () => { - // Run only in local environment. - const bucketName = 'minio-js-test-assume-role' + uuid.v4() - before((done) => client.makeBucket(bucketName, '', done)) - after((done) => client.removeBucket(bucketName, done)) - - const objName = 'datafile-100-kB' - const objContent = Buffer.alloc(100 * 1024, 0) - - const canRunAssumeRoleTest = clientConfigParams.endPoint.includes('localhost') - const stsEndPoint = 'http://localhost:9000' - - try { - if (canRunAssumeRoleTest) { - // Creates a new Client with assume role provider for testing. - const assumeRoleProvider = new AssumeRoleProvider({ - stsEndpoint: stsEndPoint, - accessKey: client.accessKey, - secretKey: client.secretKey, - }) - - const aRoleConf = Object.assign({}, clientConfigParams, { credentialsProvider: assumeRoleProvider }) - - const assumeRoleClient = new minio.Client(aRoleConf) - assumeRoleClient.region = server_region - - describe('Put an Object', function () { - step( - `Put an object with assume role credentials: bucket:_bucketName:${bucketName}, _objectName:${objName}`, - (done) => { - const putObjPromise = assumeRoleClient.putObject(bucketName, objName, objContent) - putObjPromise - .then(() => { - done() - }) - .catch(done) - }, - ) - - step(`Remove an Object with assume role credentials:${bucketName}, _objectName:${objName}`, (done) => { - const removeObjPromise = assumeRoleClient.removeObject(bucketName, objName) - removeObjPromise - .then(() => { - done() - }) - .catch(done) - }) - }) - } - } catch (err) { - // eslint-disable-next-line no-console - console.error('Error in Assume Role API.', err) - } - }) - - describe('Put Object Response test with multipart on an Un versioned bucket:', () => { - const bucketToTestMultipart = 'minio-js-test-put-multiuv-' + uuid.v4() - - before((done) => client.makeBucket(bucketToTestMultipart, '', done)) - after((done) => 
client.removeBucket(bucketToTestMultipart, done)) - - // Non multipart Test - step( - `putObject(bucketName, objectName, stream)_bucketName:${bucketToTestMultipart}, _objectName:${_100kbObjectName}, stream:100KB`, - (done) => { - const stream = readableStream(_100kb) - client.putObject(bucketToTestMultipart, _100kbObjectName, stream, metaData, (e, res) => { - if (e) { - done(e) - } - if (res.versionId === null && res.etag) { - done() - } else { - done( - new Error( - `Incorrect response format, expected: {versionId:null, etag:"some-etag-hash"} received:${JSON.stringify( - res, - )}`, - ), - ) - } - }) - }, - ) - step( - `removeObject(bucketName, objectName, stream)_bucketName:${bucketToTestMultipart}, _objectName:${_100kbObjectName}`, - (done) => { - client - .removeObject(bucketToTestMultipart, _100kbObjectName) - .then(() => done()) - .catch(done) - }, - ) - - // Multipart Test - step( - `putObject(bucketName, objectName, stream)_bucketName:${bucketToTestMultipart}, _objectName:${_65mbObjectName}, stream:65MB`, - (done) => { - const stream = readableStream(_65mb) - client.putObject(bucketToTestMultipart, _65mbObjectName, stream, metaData, (e, res) => { - if (e) { - done(e) - } - if (res.versionId === null && res.etag) { - done() - } else { - done( - new Error( - `Incorrect response format, expected: {versionId:null, etag:"some-etag-hash"} received:${JSON.stringify( - res, - )}`, - ), - ) - } - }) - }, - ) - step( - `removeObject(bucketName, objectName, stream)_bucketName:${bucketToTestMultipart}, _objectName:${_65mbObjectName}`, - (done) => { - client - .removeObject(bucketToTestMultipart, _65mbObjectName) - .then(() => done()) - .catch(done) - }, - ) - }) - - describe('Put Object Response test with multipart on Versioned bucket:', () => { - const bucketToTestMultipart = 'minio-js-test-put-multiv-' + uuid.v4() - let isVersioningSupported = false - let versionedObjectRes = null - let versionedMultiPartObjectRes = null - - before((done) => - 
client.makeBucket(bucketToTestMultipart, '', () => { - client.setBucketVersioning(bucketToTestMultipart, { Status: 'Enabled' }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - isVersioningSupported = true - done() - }) - }), - ) - after((done) => client.removeBucket(bucketToTestMultipart, done)) - - // Non multipart Test - step( - `putObject(bucketName, objectName, stream)_bucketName:${bucketToTestMultipart}, _objectName:${_100kbObjectName}, stream:100KB`, - (done) => { - if (isVersioningSupported) { - const stream = readableStream(_100kb) - client.putObject(bucketToTestMultipart, _100kbObjectName, stream, metaData, (e, res) => { - if (e) { - done(e) - } - if (res.versionId && res.etag) { - versionedObjectRes = res - done() - } else { - done( - new Error( - `Incorrect response format, expected: {versionId:'some-version-hash', etag:"some-etag-hash"} received:${JSON.stringify( - res, - )}`, - ), - ) - } - }) - } else { - done() - } - }, - ) - step( - `removeObject(bucketName, objectName, stream)_bucketName:${bucketToTestMultipart}, _objectName:${_100kbObjectName}`, - (done) => { - if (isVersioningSupported) { - client - .removeObject(bucketToTestMultipart, _100kbObjectName, { versionId: versionedObjectRes.versionId }) - .then(() => done()) - .catch(done) - } else { - done() - } - }, - ) - - // Multipart Test - step( - `putObject(bucketName, objectName, stream)_bucketName:${bucketToTestMultipart}, _objectName:${_65mbObjectName}, stream:65MB`, - (done) => { - if (isVersioningSupported) { - const stream = readableStream(_65mb) - client.putObject(bucketToTestMultipart, _65mbObjectName, stream, metaData, (e, res) => { - if (e) { - done(e) - } - if (res.versionId && res.etag) { - versionedMultiPartObjectRes = res - done() - } else { - done( - new Error( - `Incorrect response format, expected: {versionId:null, etag:"some-etag-hash"} received:${JSON.stringify( - res, - )}`, - ), - ) - } - }) - } else { - 
done() - } - }, - ) - step( - `removeObject(bucketName, objectName, stream)_bucketName:${bucketToTestMultipart}, _objectName:${_65mbObjectName}`, - (done) => { - if (isVersioningSupported) { - client - .removeObject(bucketToTestMultipart, _65mbObjectName, { versionId: versionedMultiPartObjectRes.versionId }) - .then(() => done()) - .catch(done) - } else { - done() - } - }, - ) - }) - describe('Compose Object API Tests', () => { - /** - * Steps: - * 1. Generate a 100MB file in temp dir - * 2. Split into 26 MB parts in temp dir - * 3. Upload parts to bucket - * 4. Compose into a single object in the same bucket. - * 5. Remove the file parts (Clean up) - * 6. Remove the file itself (Clean up) - * 7. Remove bucket. (Clean up) - */ - - var _100mbFileToBeSplitAndComposed = Buffer.alloc(100 * 1024 * 1024, 0) - let composeObjectTestBucket = 'minio-js-test-compose-obj-' + uuid.v4() - before((done) => client.makeBucket(composeObjectTestBucket, '', done)) - after((done) => client.removeBucket(composeObjectTestBucket, done)) - - const composedObjName = '_100-mb-file-to-test-compose' - const tmpSubDir = `${tmpDir}/compose` - var fileToSplit = `${tmpSubDir}/${composedObjName}` - let partFilesNamesWithPath = [] - let partObjNameList = [] - let isSplitSuccess = false - step(`Create a local file of 100 MB and split `, (done) => { - try { - fs.writeFileSync(fileToSplit, _100mbFileToBeSplitAndComposed) - // 100 MB split into 26 MB part size. 
- splitFile - .splitFileBySize(fileToSplit, 26 * 1024 * 1024) - .then((names) => { - partFilesNamesWithPath = names - isSplitSuccess = true - done() - }) - .catch(() => { - done() - }) - } catch (err) { - done() - } - }) - - step(`Upload parts to Bucket_bucketName:${composeObjectTestBucket}, _objectName:${partObjNameList}`, (done) => { - if (isSplitSuccess) { - const fileSysToBucket = partFilesNamesWithPath.map((partFileName) => { - const partObjName = partFileName.substr((tmpSubDir + '/').length) - partObjNameList.push(partObjName) - return client.fPutObject(composeObjectTestBucket, partObjName, partFileName, {}) - }) - - Promise.all(fileSysToBucket) - .then(() => { - done() - }) - .catch(done) - } else { - done() - } - }) - - step( - `composeObject(destObjConfig, sourceObjList, cb)::_bucketName:${composeObjectTestBucket}, _objectName:${composedObjName}`, - (done) => { - if (isSplitSuccess) { - const sourcePartObjList = partObjNameList.map((partObjName) => { - return new CopySourceOptions({ - Bucket: composeObjectTestBucket, - Object: partObjName, - }) - }) - - const destObjConfig = new CopyDestinationOptions({ - Bucket: composeObjectTestBucket, - Object: composedObjName, - }) - - client.composeObject(destObjConfig, sourcePartObjList).then((e) => { - if (e) { - return done(e) - } - done() - }) - } else { - done() - } - }, - ) - - step( - `statObject(bucketName, objectName, cb)::_bucketName:${composeObjectTestBucket}, _objectName:${composedObjName}`, - (done) => { - if (isSplitSuccess) { - client.statObject(composeObjectTestBucket, composedObjName, (e) => { - if (e) { - return done(e) - } - done() - }) - } else { - done() - } - }, - ) - - step( - `Remove Object Parts from Bucket::_bucketName:${composeObjectTestBucket}, _objectNames:${partObjNameList}`, - (done) => { - if (isSplitSuccess) { - const sourcePartObjList = partObjNameList.map((partObjName) => { - return client.removeObject(composeObjectTestBucket, partObjName) - }) - - Promise.all(sourcePartObjList) - 
.then(() => { - done() - }) - .catch(done) - } else { - done() - } - }, - ) - - step( - `Remove Composed target Object::_bucketName:${composeObjectTestBucket}, objectName:${composedObjName}`, - (done) => { - if (isSplitSuccess) { - client - .removeObject(composeObjectTestBucket, composedObjName) - .then(() => { - done() - }) - .catch(done) - } else { - done() - } - }, - ) - - step('Clean up temp directory part files', (done) => { - if (isSplitSuccess) { - removeDirAndFiles(tmpSubDir) - } - done() - }) - }) - - describe('Special Characters test on a prefix and an object', () => { - // Isolate the bucket/object for easy debugging and tracking. - const bucketNameForSpCharObjects = 'minio-js-test-obj-sppre' + uuid.v4() - before((done) => client.makeBucket(bucketNameForSpCharObjects, '', done)) - after((done) => client.removeBucket(bucketNameForSpCharObjects, done)) - - const specialCharPrefix = 'SpecialMenùäöüexPrefix/' - - let objectNameSpecialChars = "äöüex ®©µÄÆÐÕæŒƕƩDž 01000000 0x40 \u0040 amȡȹɆple&0a!-_.*'()&$@=;:+,?<>.pdf" - if (isWindowsPlatform) { - objectNameSpecialChars = "äöüex ®©µÄÆÐÕæŒƕƩDž 01000000 0x40 u0040 amȡȹɆple&0a!-_.'()&$@=;+,.pdf" - } - - const objectNameWithPrefix = `${specialCharPrefix}${objectNameSpecialChars}` - - const objectContents = Buffer.alloc(100 * 1024, 0) - - step( - `putObject(bucketName, objectName, stream)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameWithPrefix}, stream:100Kib`, - (done) => { - client - .putObject(bucketNameForSpCharObjects, objectNameWithPrefix, objectContents) - .then(() => { - done() - }) - .catch(done) - }, - ) - - step( - `listObjects(bucketName, prefix, recursive)_bucketName:${bucketNameForSpCharObjects}, prefix:"", false`, - (done) => { - const listStream = client.listObjects(bucketNameForSpCharObjects, '', false) - let listedObject = null - listStream.on('data', function (obj) { - listedObject = obj - }) - listStream.on('end', () => { - if (listedObject.prefix === specialCharPrefix) { - 
done() - } else { - return done(new Error(`Expected Prefix Name: ${specialCharPrefix}: received:${listedObject.prefix}`)) - } - }) - listStream.on('error', function (e) { - done(e) - }) - }, - ) - - step( - `listObjectsV2(bucketName, prefix, recursive)_bucketName:${bucketNameForSpCharObjects}, prefix:"", false`, - (done) => { - const listStream = client.listObjectsV2(bucketNameForSpCharObjects, '', false) - let listedObject = null - listStream.on('data', function (obj) { - listedObject = obj - }) - listStream.on('end', () => { - // verify that the prefix special characters are handled - if (listedObject.prefix === specialCharPrefix) { - done() - } else { - return done(new Error(`Expected object Name: ${specialCharPrefix}: received:${listedObject.prefix}`)) - } - }) - - listStream.on('error', function (e) { - done(e) - }) - }, - ) - - step( - `extensions.listObjectsV2WithMetadata(bucketName, prefix, recursive)_bucketName:${bucketNameForSpCharObjects}, prefix:"", false`, - (done) => { - const listStream = client.extensions.listObjectsV2WithMetadata(bucketNameForSpCharObjects, '', false) - let listedObject = null - listStream.on('data', function (obj) { - listedObject = obj - }) - listStream.on('end', () => { - if (listedObject.prefix === specialCharPrefix) { - done() - } else { - return done(new Error(`Expected object Name: ${specialCharPrefix}: received:${listedObject.prefix}`)) - } - }) - - listStream.on('error', function (e) { - done(e) - }) - }, - ) - - step( - `getObject(bucketName, objectName)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameWithPrefix}`, - (done) => { - client - .getObject(bucketNameForSpCharObjects, objectNameWithPrefix) - .then((stream) => { - stream.on('data', function () {}) - stream.on('end', done) - }) - .catch(done) - }, - ) - - step( - `statObject(bucketName, objectName, cb)_bucketName:${bucketNameForSpCharObjects}, _objectName:${objectNameWithPrefix}`, - (done) => { - client.statObject(bucketNameForSpCharObjects, 
objectNameWithPrefix, (e) => { - if (e) { - return done(e) - } - done() - }) - }, - ) - step( - `removeObject(bucketName, objectName)_bucketName:${objectNameWithPrefix}, _objectName:${objectNameWithPrefix}`, - (done) => { - client - .removeObject(bucketNameForSpCharObjects, objectNameWithPrefix) - .then(() => done()) - .catch(done) - }, - ) - }) - describe('Test listIncompleteUploads (Multipart listing) with special characters', () => { - const specialCharPrefix = 'SpecialMenùäöüexPrefix/' - const objectNameSpecialChars = 'äöüex.pdf' - const spObjWithPrefix = `${specialCharPrefix}${objectNameSpecialChars}` - const spBucketName = 'minio-js-test-lin-sppre' + uuid.v4() - - before((done) => client.makeBucket(spBucketName, '', done)) - after((done) => client.removeBucket(spBucketName, done)) - - step( - `initiateNewMultipartUpload(bucketName, objectName, metaData, cb)_bucketName:${spBucketName}, objectName:${spObjWithPrefix}, metaData:${metaData}`, - (done) => { - client.initiateNewMultipartUpload(spBucketName, spObjWithPrefix, metaData, done) - }, - ) - - step( - `listIncompleteUploads(bucketName, prefix, recursive)_bucketName:${spBucketName}, prefix:${spObjWithPrefix}, recursive: true_`, - function (done) { - // MinIO's ListIncompleteUploads returns an empty list, so skip this on non-AWS. - let hostSkipList = ['s3.amazonaws.com'] - if (!hostSkipList.includes(client.host)) { - done() - return - } - - var found = false - client - .listIncompleteUploads(spBucketName, spObjWithPrefix, true) - .on('error', (e) => done(e)) - .on('data', (data) => { - if (data.key === spObjWithPrefix) { - found = true - } - }) - .on('end', () => { - if (found) { - return done() - } - done(new Error(`${spObjWithPrefix} not found during listIncompleteUploads`)) - }) - }, - ) - - step( - `listIncompleteUploads(bucketName, prefix, recursive)_bucketName:${spBucketName}, recursive: true_`, - function (done) { - // MinIO's ListIncompleteUploads returns an empty list, so skip this on non-AWS. 
- let hostSkipList = ['s3.amazonaws.com'] - if (!hostSkipList.includes(client.host)) { - done() - return - } - - var found = false - client - .listIncompleteUploads(spBucketName, '', false) - .on('error', (e) => done(e)) - .on('data', (data) => { - // check the prefix - if (data.prefix === specialCharPrefix) { - found = true - } - }) - .on('end', () => { - if (found) { - return done() - } - done(new Error(`${specialCharPrefix} not found during listIncompleteUploads`)) - }) - }, - ) - step( - `removeIncompleteUploads(bucketName, prefix)_bucketName:${spBucketName}, prefix:${spObjWithPrefix}_`, - (done) => { - client.removeIncompleteUpload(spBucketName, spObjWithPrefix).then(done).catch(done) - }, - ) - }) - describe('Select Object content API Test', function () { - const selObjContentBucket = 'minio-js-test-sel-object-' + uuid.v4() - const selObject = 'SelectObjectContent' - // Isolate the bucket/object for easy debugging and tracking. - before((done) => client.makeBucket(selObjContentBucket, '', done)) - after((done) => client.removeBucket(selObjContentBucket, done)) - - step( - `putObject(bucketName, objectName, stream)_bucketName:${selObjContentBucket}, objectName:${selObject}, stream:csv`, - (done) => { - // Save a CSV file so that we can query later to test the results. 
- client - .putObject( - selObjContentBucket, - selObject, - 'Name,PhoneNumber,City,Occupation\n' + - 'Sam,(949) 123-45567,Irvine,Solutions Architect\n' + - 'Vinod,(949) 123-4556,Los Angeles,Solutions Architect\n' + - 'Jeff,(949) 123-45567,Seattle,AWS Evangelist\n' + - 'Jane,(949) 123-45567,Chicago,Developer\n' + - 'Sean,(949) 123-45567,Chicago,Developer\n' + - 'Mary,(949) 123-45567,Chicago,Developer\n' + - 'Kate,(949) 123-45567,Chicago,Developer', - {}, - ) - .then(() => { - done() - }) - .catch(done) - }, - ) - - step( - `selectObjectContent(bucketName, objectName, selectOpts)_bucketName:${selObjContentBucket}, objectName:${selObject}`, - (done) => { - const selectOpts = { - expression: 'SELECT * FROM s3object s where s."Name" = \'Jane\'', - expressionType: 'SQL', - inputSerialization: { - CSV: { FileHeaderInfo: 'Use', RecordDelimiter: '\n', FieldDelimiter: ',' }, - CompressionType: 'NONE', - }, - outputSerialization: { CSV: { RecordDelimiter: '\n', FieldDelimiter: ',' } }, - requestProgress: { Enabled: true }, - } - - client - .selectObjectContent(selObjContentBucket, selObject, selectOpts) - .then((result) => { - // verify the select query result string. - if (result.getRecords().toString() === 'Jane,(949) 123-45567,Chicago,Developer\n') { - // \n for csv line ending. - done() - } else { - return done( - new Error( - `Expected Result did not match received:${result - .getRecords() - .toString()} expected:"Jane,(949) 123-45567,Chicago,Developer\n"`, - ), - ) - } - }) - .catch(done) - }, - ) - - step(`Remove Object post select of content:_bucketName:${selObjContentBucket},objectName:${selObject}`, (done) => { - client - .removeObject(selObjContentBucket, selObject) - .then(() => done()) - .catch(done) - }) - }) - - describe('Force Deletion of objects with versions', function () { - // Isolate the bucket/object for easy debugging and tracking. 
- const fdWithVerBucket = 'minio-js-fd-version-' + uuid.v4() - const fdObjectName = 'datafile-100-kB' - const fdObject = dataDir ? fs.readFileSync(dataDir + '/' + fdObjectName) : Buffer.alloc(100 * 1024, 0) - - before((done) => client.makeBucket(fdWithVerBucket, '', done)) - after((done) => client.removeBucket(fdWithVerBucket, done)) - - describe('Test for force removal of multiple versions', function () { - let isVersioningSupported = false - const objVersionList = [] - step( - `setBucketVersioning(bucketName, versionConfig):_bucketName:${fdWithVerBucket},versionConfig:{Status:"Enabled"} `, - (done) => { - client.setBucketVersioning(fdWithVerBucket, { Status: 'Enabled' }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - isVersioningSupported = true - done() - }) - }, - ) - - step( - `putObject(bucketName, objectName, stream)_bucketName:${fdWithVerBucket}, objectName:${fdObjectName}, stream:100Kib_`, - (done) => { - if (isVersioningSupported) { - client - .putObject(fdWithVerBucket, fdObjectName, fdObject) - .then(() => done()) - .catch(done) - } else { - done() - } - }, - ) - // Put two versions of the same object. 
- step( - `putObject(bucketName, objectName, stream)_bucketName:${fdWithVerBucket}, objectName:${fdObjectName}, stream:100Kib_`, - (done) => { - if (isVersioningSupported) { - client - .putObject(fdWithVerBucket, fdObjectName, fdObject) - .then(() => done()) - .catch(done) - } else { - done() - } - }, - ) - - step( - `removeObject(bucketName, objectList, removeOpts)_bucketName:${fdWithVerBucket}_Remove ${objVersionList.length} objects`, - (done) => { - if (isVersioningSupported) { - client.removeObject(fdWithVerBucket, fdObjectName, { forceDelete: true }, () => { - done() - }) - } else { - done() - } - }, - ) - - step( - `listObjects(bucketName, prefix, recursive)_bucketName:${fdWithVerBucket}, prefix: '', recursive:true_`, - (done) => { - if (isVersioningSupported) { - client - .listObjects(fdWithVerBucket, '', true, { IncludeVersion: true }) - .on('error', done) - .on('end', () => { - if (_.isEqual(0, objVersionList.length)) { - return done() - } - return done(new Error(`listObjects lists ${objVersionList.length} objects, expected 0`)) - }) - .on('data', (data) => { - objVersionList.push(data) - }) - } else { - done() - } - }, - ) - }) - }) - - describe('Force Deletion of prefix with versions', function () { - // Isolate the bucket/object for easy debugging and tracking. - const fdPrefixBucketName = 'minio-js-fd-version-' + uuid.v4() - const fdPrefixObjName = 'my-prefix/datafile-100-kB' - const fdPrefixObject = dataDir ? 
fs.readFileSync(dataDir + '/' + fdPrefixObjName) : Buffer.alloc(100 * 1024, 0) - - before((done) => client.makeBucket(fdPrefixBucketName, '', done)) - after((done) => client.removeBucket(fdPrefixBucketName, done)) - - describe('Test for removal of multiple versions', function () { - let isVersioningSupported = false - const objVersionList = [] - step( - `setBucketVersioning(bucketName, versionConfig):_bucketName:${fdPrefixBucketName},versionConfig:{Status:"Enabled"} `, - (done) => { - client.setBucketVersioning(fdPrefixBucketName, { Status: 'Enabled' }, (err) => { - if (err && err.code === 'NotImplemented') { - return done() - } - if (err) { - return done(err) - } - isVersioningSupported = true - done() - }) - }, - ) - - step( - `putObject(bucketName, objectName, stream)_bucketName:${fdPrefixBucketName}, objectName:${fdPrefixObjName}, stream:100Kib_`, - (done) => { - if (isVersioningSupported) { - client - .putObject(fdPrefixBucketName, fdPrefixObjName, fdPrefixObject) - .then(() => done()) - .catch(done) - } else { - done() - } - }, - ) - // Put two versions of the same object. 
- step( - `putObject(bucketName, objectName, stream)_bucketName:${fdPrefixBucketName}, objectName:${fdPrefixObjName}, stream:100Kib_`, - (done) => { - if (isVersioningSupported) { - client - .putObject(fdPrefixBucketName, fdPrefixObjName, fdPrefixObject) - .then(() => done()) - .catch(done) - } else { - done() - } - }, - ) - - step( - `removeObject(bucketName, objectList, removeOpts)_bucketName:${fdPrefixBucketName}_Remove ${objVersionList.length} objects`, - (done) => { - if (isVersioningSupported) { - client.removeObject(fdPrefixBucketName, 'my-prefix/', { forceDelete: true }, () => { - done() - }) - } else { - done() - } - }, - ) - - step( - `listObjects(bucketName, prefix, recursive)_bucketName:${fdPrefixBucketName}, prefix: '', recursive:true_`, - (done) => { - if (isVersioningSupported) { - client - .listObjects(fdPrefixBucketName, '/my-prefix', true, { IncludeVersion: true }) - .on('error', done) - .on('end', () => { - if (_.isEqual(0, objVersionList.length)) { - return done() - } - return done(new Error(`listObjects lists ${objVersionList.length} objects, expected 0`)) - }) - .on('data', (data) => { - objVersionList.push(data) - }) - } else { - done() - } - }, - ) - }) - }) - - describe('Force Deletion of objects without versions', function () { - // Isolate the bucket/object for easy debugging and tracking. - const versionedBucketName = 'minio-js-fd-nv-' + uuid.v4() - const versioned_100kbObjectName = 'datafile-100-kB' - const versioned_100kb_Object = dataDir - ? 
fs.readFileSync(dataDir + '/' + versioned_100kbObjectName) - : Buffer.alloc(100 * 1024, 0) - - before((done) => client.makeBucket(versionedBucketName, '', done)) - after((done) => client.removeBucket(versionedBucketName, done)) - - describe('Test force removal of an object', function () { - step( - `putObject(bucketName, objectName, stream)_bucketName:${versionedBucketName}, objectName:${versioned_100kbObjectName}, stream:100Kib_`, - (done) => { - client - .putObject(versionedBucketName, versioned_100kbObjectName, versioned_100kb_Object) - .then(() => done()) - .catch(done) - }, - ) - - step( - `removeObject(bucketName, objectList, removeOpts)_bucketName:${versionedBucketName}_Remove 1 object`, - (done) => { - client.removeObject(versionedBucketName, versioned_100kbObjectName, { forceDelete: true }, () => { - done() - }) - }, - ) - - step( - `listObjects(bucketName, prefix, recursive)_bucketName:${versionedBucketName}, prefix: '', recursive:true_`, - (done) => { - let objVersionList = [] - client - .listObjects(versionedBucketName, '', true, {}) - .on('error', done) - .on('end', () => { - if (_.isEqual(0, objVersionList.length)) { - return done() - } - return done(new Error(`listObjects lists ${objVersionList.length} objects, expected 0`)) - }) - .on('data', (data) => { - objVersionList.push(data) - }) - }, - ) - }) - }) - - describe('Force Deletion of prefix', function () { - // Isolate the bucket/object for easy debugging and tracking. - const fdPrefixBucket = 'minio-js-fd-nv-' + uuid.v4() - const fdObjectName = 'my-prefix/datafile-100-kB' - const fdObject = dataDir ? 
fs.readFileSync(dataDir + '/' + fdObjectName) : Buffer.alloc(100 * 1024, 0) - - before((done) => client.makeBucket(fdPrefixBucket, '', done)) - after((done) => client.removeBucket(fdPrefixBucket, done)) - - describe('Test force removal of a prefix', function () { - step( - `putObject(bucketName, objectName, stream)_bucketName:${fdPrefixBucket}, objectName:${fdObjectName}, stream:100Kib_`, - (done) => { - client - .putObject(fdPrefixBucket, fdObjectName, fdObject) - .then(() => done()) - .catch(done) - }, - ) - - step(`removeObject(bucketName, objectList, removeOpts)_bucketName:${fdPrefixBucket}_Remove 1 object`, (done) => { - client.removeObject(fdPrefixBucket, '/my-prefix', { forceDelete: true }, () => { - done() - }) - }) - - step( - `listObjects(bucketName, prefix, recursive)_bucketName:${fdPrefixBucket}, prefix: 'my-prefix', recursive:true_`, - (done) => { - let objList = [] - client - .listObjects(fdPrefixBucket, 'my-prefix', true, {}) - .on('error', done) - .on('end', () => { - if (_.isEqual(0, objList.length)) { - return done() - } - return done(new Error(`listObjects lists ${objList.length} objects, expected 0`)) - }) - .on('data', (data) => { - objList.push(data) - }) - }, - ) - }) - }) -}) diff --git a/src/test/unit/test.js b/src/test/unit/test.js deleted file mode 100644 index 4c713607..00000000 --- a/src/test/unit/test.js +++ /dev/null @@ -1,2108 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2016 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -require('source-map-support').install() - -import { assert } from 'chai' -import Nock from 'nock' -import Stream from 'stream' - -import { - calculateEvenSplits, - CopyDestinationOptions, - CopySourceOptions, - isArray, - isValidEndpoint, - isValidIP, - makeDateLong, - makeDateShort, - partsRequired, -} from '../../../dist/main/helpers' -import * as Minio from '../../../dist/main/minio' - -var Package = require('../../../package.json') - -describe('Helpers', () => { - it('should validate for s3 endpoint', () => { - assert.equal(isValidEndpoint('s3.amazonaws.com'), true) - }) - it('should validate for s3 china', () => { - assert.equal(isValidEndpoint('s3.cn-north-1.amazonaws.com.cn'), true) - }) - it('should validate for us-west-2', () => { - assert.equal(isValidEndpoint('s3-us-west-2.amazonaws.com'), true) - }) - it('should fail for invalid endpoint characters', () => { - assert.equal(isValidEndpoint('111.#2.11'), false) - }) - - it('should make date short', () => { - let date = new Date('2012-12-03T17:25:36.331Z') - - assert.equal(makeDateShort(date), '20121203') - }) - it('should make date long', () => { - let date = new Date('2017-08-11T17:26:34.935Z') - - assert.equal(makeDateLong(date), '20170811T172634Z') - }) - - // Adopted from minio-go sdk - const oneGB = 1024 * 1024 * 1024 - const fiveGB = 5 * oneGB - - const OBJ_SIZES = { - gb1: oneGB, - gb5: fiveGB, - gb5p1: fiveGB + 1, - gb10p1: 2 * fiveGB + 1, - gb10p2: 2 * fiveGB + 2, - } - - const maxMultipartPutObjectSize = 1024 * 1024 * 1024 * 1024 * 5 - - it('Parts Required Test cases ', () => { - const expectedPartsRequiredTestCases = [ - { value: 0, expected: 0 }, - { value: 1, expected: 1 }, - { value: fiveGB, expected: 10 }, - { value: OBJ_SIZES.gb5p1, expected: 10 }, - { value: 2 * fiveGB, expected: 20 }, - { value: OBJ_SIZES.gb10p1, expected: 20 }, - { value: OBJ_SIZES.gb10p2, expected: 20 }, - 
{ value: OBJ_SIZES.gb10p1 + OBJ_SIZES.gb10p2, expected: 40 }, - { value: maxMultipartPutObjectSize, expected: 10000 }, - ] - - expectedPartsRequiredTestCases.forEach((testCase) => { - const fnResult = partsRequired(testCase.value) - assert.equal(fnResult, testCase.expected) - }) - }) - it('Even split of Sizes Test cases ', () => { - // Adopted from minio-go sdk - const expectedSplitsTestCases = [ - { size: 0, sourceConfig: new CopySourceOptions({ Start: -1 }), expectedStart: null, expectedEnd: null }, - { size: 1, sourceConfig: new CopySourceOptions({ Start: -1 }), expectedStart: [undefined], expectedEnd: [NaN] }, - { size: 1, sourceConfig: new CopySourceOptions({ Start: 0 }), expectedStart: [0], expectedEnd: [0] }, - { - size: OBJ_SIZES.gb1, - sourceConfig: new CopySourceOptions({ Start: -1 }), - expectedStart: [0, 536870912], - expectedEnd: [536870911, 1073741823], - }, - { - size: OBJ_SIZES.gb5, - sourceConfig: new CopySourceOptions({ Start: -1 }), - expectedStart: [ - 0, 536870912, 1073741824, 1610612736, 2147483648, 2684354560, 3221225472, 3758096384, 4294967296, 4831838208, - ], - expectedEnd: [ - 536870911, 1073741823, 1610612735, 2147483647, 2684354559, 3221225471, 3758096383, 4294967295, 4831838207, - 5368709119, - ], - }, - - // 2 part splits - { - size: OBJ_SIZES.gb5p1, - sourceConfig: new CopySourceOptions({ Start: -1 }), - expectedStart: [ - 0, 536870913, 1073741825, 1610612737, 2147483649, 2684354561, 3221225473, 3758096385, 4294967297, 4831838209, - ], - expectedEnd: [ - 536870912, 1073741824, 1610612736, 2147483648, 2684354560, 3221225472, 3758096384, 4294967296, 4831838208, - 5368709120, - ], - }, - { - size: OBJ_SIZES.gb5p1, - sourceConfig: new CopySourceOptions({ Start: -1 }), - expectedStart: [ - 0, 536870913, 1073741825, 1610612737, 2147483649, 2684354561, 3221225473, 3758096385, 4294967297, 4831838209, - ], - expectedEnd: [ - 536870912, 1073741824, 1610612736, 2147483648, 2684354560, 3221225472, 3758096384, 4294967296, 4831838208, - 
5368709120, - ], - }, - - // 3 part splits - { - size: OBJ_SIZES.gb10p1, - sourceConfig: new CopySourceOptions({ Start: -1 }), - expectedStart: [ - 0, 536870913, 1073741825, 1610612737, 2147483649, 2684354561, 3221225473, 3758096385, 4294967297, 4831838209, - 5368709121, 5905580033, 6442450945, 6979321857, 7516192769, 8053063681, 8589934593, 9126805505, 9663676417, - 10200547329, - ], - expectedEnd: [ - 536870912, 1073741824, 1610612736, 2147483648, 2684354560, 3221225472, 3758096384, 4294967296, 4831838208, - 5368709120, 5905580032, 6442450944, 6979321856, 7516192768, 8053063680, 8589934592, 9126805504, 9663676416, - 10200547328, 10737418240, - ], - }, - { - size: OBJ_SIZES.gb10p2, - sourceConfig: new CopySourceOptions({ Start: -1 }), - expectedStart: [ - 0, 536870913, 1073741826, 1610612738, 2147483650, 2684354562, 3221225474, 3758096386, 4294967298, 4831838210, - 5368709122, 5905580034, 6442450946, 6979321858, 7516192770, 8053063682, 8589934594, 9126805506, 9663676418, - 10200547330, - ], - expectedEnd: [ - 536870912, 1073741825, 1610612737, 2147483649, 2684354561, 3221225473, 3758096385, 4294967297, 4831838209, - 5368709121, 5905580033, 6442450945, 6979321857, 7516192769, 8053063681, 8589934593, 9126805505, 9663676417, - 10200547329, 10737418241, - ], - }, - ] - - expectedSplitsTestCases.forEach((testCase) => { - const fnResult = calculateEvenSplits(testCase.size, testCase) - const { startIndex, endIndex } = fnResult || {} - - if (isArray(startIndex) && isArray(endIndex)) { - const isExpectedResult = - startIndex.length === testCase.expectedStart.length && endIndex.length === testCase.expectedEnd.length - assert.equal(isExpectedResult, true) - } else { - // null cases. 
- assert.equal(startIndex, expectedSplitsTestCases.expectedStart) - assert.equal(endIndex, expectedSplitsTestCases.expectedEnd) - } - }) - }) -}) - -describe('CopyConditions', () => { - let date = 'Fri, 11 Aug 2017 19:34:18 GMT' - - let cc = new Minio.CopyConditions() - - describe('#setModified', () => { - it('should take a date argument', () => { - cc.setModified(new Date(date)) - - assert.equal(cc.modified, date) - }) - - it('should throw without date', () => { - assert.throws(() => { - cc.setModified() - }, /date must be of type Date/) - - assert.throws(() => { - cc.setModified({ hi: 'there' }) - }, /date must be of type Date/) - }) - }) - - describe('#setUnmodified', () => { - it('should take a date argument', () => { - cc.setUnmodified(new Date(date)) - - assert.equal(cc.unmodified, date) - }) - - it('should throw without date', () => { - assert.throws(() => { - cc.setUnmodified() - }, /date must be of type Date/) - - assert.throws(() => { - cc.setUnmodified({ hi: 'there' }) - }, /date must be of type Date/) - }) - }) -}) - -describe('Client', function () { - var nockRequests = [] - this.timeout(5000) - beforeEach(() => { - Nock.cleanAll() - nockRequests = [] - }) - afterEach(() => { - nockRequests.forEach((element) => { - if (!element.request.isDone()) { - element.request.done() - } - }) - }) - var client = new Minio.Client({ - endPoint: 'localhost', - port: 9000, - accessKey: 'accesskey', - secretKey: 'secretkey', - useSSL: false, - }) - describe('new client', () => { - it('should work with https', () => { - var client = new Minio.Client({ - endPoint: 'localhost', - accessKey: 'accesskey', - secretKey: 'secretkey', - }) - assert.equal(client.port, 443) - }) - it('should override port with http', () => { - var client = new Minio.Client({ - endPoint: 'localhost', - port: 9000, - accessKey: 'accesskey', - secretKey: 'secretkey', - useSSL: false, - }) - assert.equal(client.port, 9000) - }) - it('should work with http', () => { - var client = new Minio.Client({ - 
endPoint: 'localhost', - accessKey: 'accesskey', - secretKey: 'secretkey', - useSSL: false, - }) - assert.equal(client.port, 80) - }) - it('should override port with https', () => { - var client = new Minio.Client({ - endPoint: 'localhost', - port: 9000, - accessKey: 'accesskey', - secretKey: 'secretkey', - }) - assert.equal(client.port, 9000) - }) - it('should fail with url', (done) => { - try { - new Minio.Client({ - endPoint: 'http://localhost:9000', - accessKey: 'accesskey', - secretKey: 'secretkey', - }) - } catch (e) { - done() - } - }) - it('should fail with alphanumeric', (done) => { - try { - new Minio.Client({ - endPoint: 'localhost##$@3', - accessKey: 'accesskey', - secretKey: 'secretkey', - }) - } catch (e) { - done() - } - }) - it('should fail with no url', (done) => { - try { - new Minio.Client({ - accessKey: 'accesskey', - secretKey: 'secretkey', - }) - } catch (e) { - done() - } - }) - it('should fail with bad port', (done) => { - try { - new Minio.Client({ - endPoint: 'localhost', - port: -1, - accessKey: 'accesskey', - secretKey: 'secretkey', - }) - } catch (e) { - done() - } - }) - it('should fail when secure param is passed', (done) => { - try { - new Minio.Client({ - endPoint: 'localhost', - secure: false, - port: 9000, - accessKey: 'accesskey', - secretKey: 'secretkey', - }) - } catch (e) { - done() - } - }) - it('should fail when secure param is passed', (done) => { - try { - new Minio.Client({ - endPoint: 'localhost', - secure: true, - port: 9000, - accessKey: 'accesskey', - secretKey: 'secretkey', - }) - } catch (e) { - done() - } - }) - }) - describe('Presigned URL', () => { - describe('presigned-get', () => { - it('should not generate presigned url with no access key', (done) => { - try { - var client = new Minio.Client({ - endPoint: 'localhost', - port: 9000, - useSSL: false, - }) - client.presignedGetObject('bucket', 'object', 1000, function () {}) - } catch (e) { - done() - } - }) - it('should not generate presigned url with wrong 
expires param', (done) => { - try { - client.presignedGetObject('bucket', 'object', '0', function () {}) - } catch (e) { - done() - } - }) - }) - describe('presigned-put', () => { - it('should not generate presigned url with no access key', (done) => { - try { - var client = new Minio.Client({ - endPoint: 'localhost', - port: 9000, - useSSL: false, - }) - client.presignedPutObject('bucket', 'object', 1000, function () {}) - } catch (e) { - done() - } - }) - it('should not generate presigned url with wrong expires param', (done) => { - try { - client.presignedPutObject('bucket', 'object', '0', function () {}) - } catch (e) { - done() - } - }) - }) - describe('presigned-post-policy', () => { - it('should not generate content type for undefined value', () => { - assert.throws(() => { - var policy = client.newPostPolicy() - policy.setContentType() - }, /content-type cannot be null/) - }) - it('should not generate content disposition for undefined value', () => { - assert.throws(() => { - var policy = client.newPostPolicy() - policy.setContentDisposition() - }, /content-disposition cannot be null/) - }) - it('should not generate user defined metadata for string value', () => { - assert.throws(() => { - var policy = client.newPostPolicy() - policy.setUserMetaData('123') - }, /metadata should be of type "object"/) - }) - it('should not generate user defined metadata for null value', () => { - assert.throws(() => { - var policy = client.newPostPolicy() - policy.setUserMetaData(null) - }, /metadata should be of type "object"/) - }) - it('should not generate user defined metadata for undefined value', () => { - assert.throws(() => { - var policy = client.newPostPolicy() - policy.setUserMetaData() - }, /metadata should be of type "object"/) - }) - }) - }) - describe('User Agent', () => { - it('should have a default user agent', () => { - var client = new Minio.Client({ - endPoint: 'localhost', - accessKey: 'accesskey', - secretKey: 'secretkey', - }) - assert.equal(`MinIO 
(${process.platform}; ${process.arch}) minio-js/${Package.version}`, client.userAgent) - }) - it('should set user agent', () => { - var client = new Minio.Client({ - endPoint: 'localhost', - accessKey: 'accesskey', - secretKey: 'secretkey', - }) - client.setAppInfo('test', '3.2.1') - assert.equal( - `MinIO (${process.platform}; ${process.arch}) minio-js/${Package.version} test/3.2.1`, - client.userAgent, - ) - }) - it('should set user agent without comments', () => { - var client = new Minio.Client({ - endPoint: 'localhost', - accessKey: 'accesskey', - secretKey: 'secretkey', - }) - client.setAppInfo('test', '3.2.1') - assert.equal( - `MinIO (${process.platform}; ${process.arch}) minio-js/${Package.version} test/3.2.1`, - client.userAgent, - ) - }) - it('should not set user agent without name', (done) => { - try { - var client = new Minio.Client({ - endPoint: 'localhost', - accessKey: 'accesskey', - secretKey: 'secretkey', - }) - client.setAppInfo(null, '3.2.1') - } catch (e) { - done() - } - }) - it('should not set user agent with empty name', (done) => { - try { - var client = new Minio.Client({ - endPoint: 'localhost', - accessKey: 'accesskey', - secretKey: 'secretkey', - }) - client.setAppInfo('', '3.2.1') - } catch (e) { - done() - } - }) - it('should not set user agent without version', (done) => { - try { - var client = new Minio.Client({ - endPoint: 'localhost', - accessKey: 'accesskey', - secretKey: 'secretkey', - }) - client.setAppInfo('test', null) - } catch (e) { - done() - } - }) - it('should not set user agent with empty version', (done) => { - try { - var client = new Minio.Client({ - endPoint: 'localhost', - accessKey: 'accesskey', - secretKey: 'secretkey', - }) - client.setAppInfo('test', '') - } catch (e) { - done() - } - }) - }) - - describe('object level', () => { - describe('#getObject(bucket, object, callback)', () => { - it('should fail on null bucket', (done) => { - try { - client.getObject(null, 'hello', function () {}) - } catch (e) { - 
done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.getObject('', 'hello', function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.getObject(' \n \t ', 'hello', function () {}) - } catch (e) { - done() - } - }) - it('should fail on null object', (done) => { - try { - client.getObject('hello', null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty object', (done) => { - try { - client.getObject('hello', '', function () {}) - } catch (e) { - done() - } - }) - }) - - describe('#putObject(bucket, object, source, size, contentType, callback)', () => { - describe('with small objects using single put', () => { - it('should fail when data is smaller than specified', (done) => { - var s = new Stream.Readable() - s._read = function () {} - s.push('hello world') - s.push(null) - client.putObject('bucket', 'object', s, 12, '', (e) => { - if (e) { - done() - } - }) - }) - it('should fail when data is larger than specified', (done) => { - var s = new Stream.Readable() - s._read = function () {} - s.push('hello world') - s.push(null) - client.putObject('bucket', 'object', s, 10, '', (e) => { - if (e) { - done() - } - }) - }) - it('should fail with invalid bucket name', () => { - assert.throws(() => { - client.putObject('ab', 'object', () => {}) - }, /Invalid bucket name/) - }) - it('should fail with invalid object name', () => { - assert.throws(() => { - client.putObject('bucket', '', () => {}) - }, /Invalid object name/) - }) - it('should error with size > maxObjectSize', () => { - assert.throws(() => { - client.putObject('bucket', 'object', new Stream.Readable(), client.maxObjectSize + 1, () => {}) - }, /size should not be more than/) - }) - it('should fail on null bucket', (done) => { - try { - client.putObject(null, 'hello', null, 1, '', function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.putObject(' 
\n \t ', 'hello', null, 1, '', function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.putObject('', 'hello', null, 1, '', function () {}) - } catch (e) { - done() - } - }) - it('should fail on null object', (done) => { - try { - client.putObject('hello', null, null, 1, '', function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty object', (done) => { - try { - client.putObject('hello', '', null, 1, '', function () {}) - } catch (e) { - done() - } - }) - }) - }) - - describe('#removeAllBucketNotification()', () => { - it('should error on invalid arguments', () => { - assert.throws(() => { - client.removeAllBucketNotification( - 'ab', - () => {}, - function () {}, - ) - }, /Invalid bucket name/) - }) - }) - - describe('#setBucketNotification()', () => { - it('should error on invalid arguments', () => { - assert.throws(() => { - client.setBucketNotification('ab', () => {}) - }, /Invalid bucket name/) - assert.throws(() => { - client.setBucketNotification('bucket', 49, () => {}) - }, /notification config should be of type "Object"/) - }) - }) - - describe('#getBucketNotification()', () => { - it('should error on invalid arguments', () => { - assert.throws(() => { - client.getBucketNotification('ab', () => {}) - }, /Invalid bucket name/) - }) - }) - - describe('#listenBucketNotification', () => { - it('should error on invalid arguments', () => { - assert.throws(() => { - client.listenBucketNotification('ab', 'prefix', 'suffix', ['events']) - }, /Invalid bucket name/) - assert.throws(() => { - client.listenBucketNotification('bucket', {}, 'suffix', ['events']) - }, /prefix must be of type string/) - assert.throws(() => { - client.listenBucketNotification('bucket', '', {}, ['events']) - }, /suffix must be of type string/) - assert.throws(() => { - client.listenBucketNotification('bucket', '', '', {}) - }, /events must be of type Array/) - }) - }) - - describe('#statObject(bucket, object, 
callback)', () => { - it('should fail on null bucket', (done) => { - try { - client.statObject(null, 'hello', function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.statObject('', 'hello', function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.statObject(' \n \t ', 'hello', function () {}) - } catch (e) { - done() - } - }) - it('should fail on null object', (done) => { - try { - client.statObject('hello', null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty object', (done) => { - try { - client.statObject('hello', '', function () {}) - } catch (e) { - done() - } - }) - - it('should fail on incompatible argument type (number) for statOpts object', (done) => { - try { - client.statObject('hello', 'testStatOpts', 1, function () {}) - } catch (e) { - done() - } - }) - it('should fail on incompatible argument type (null) for statOpts object', (done) => { - try { - client.statObject('hello', 'testStatOpts', null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on incompatible argument type (sting) for statOpts object', (done) => { - try { - client.statObject('hello', 'testStatOpts', ' ', function () {}) - } catch (e) { - done() - } - }) - }) - - describe('#removeObject(bucket, object, callback)', () => { - it('should fail on null bucket', (done) => { - try { - client.removeObject(null, 'hello', function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.removeObject('', 'hello', function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.removeObject(' \n \t ', 'hello', function () {}) - } catch (e) { - done() - } - }) - it('should fail on null object', (done) => { - try { - client.removeObject('hello', null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty object', 
(done) => { - try { - client.removeObject('hello', '', function () {}) - } catch (e) { - done() - } - }) - // Versioning related options as removeOpts - it('should fail on empty (null) removeOpts object', (done) => { - try { - client.removeObject('hello', 'testRemoveOpts', null, function () {}) - } catch (e) { - done() - } - }) - - it('should fail on empty (string) removeOpts', (done) => { - try { - client.removeObject('hello', 'testRemoveOpts', '', function () {}) - } catch (e) { - done() - } - }) - }) - - describe('#removeIncompleteUpload(bucket, object, callback)', () => { - it('should fail on null bucket', (done) => { - try { - client.removeIncompleteUpload(null, 'hello', function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.removeIncompleteUpload('', 'hello', function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.removeIncompleteUpload(' \n \t ', 'hello', function () {}) - } catch (e) { - done() - } - }) - it('should fail on null object', (done) => { - try { - client.removeIncompleteUpload('hello', null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty object', (done) => { - try { - client.removeIncompleteUpload('hello', '', function () {}) - } catch (e) { - done() - } - }) - }) - }) - - describe('Bucket Versioning APIs', () => { - describe('getBucketVersioning(bucket, callback)', () => { - it('should fail on null bucket', (done) => { - try { - client.getBucketVersioning(null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.getBucketVersioning('', function () {}) - } catch (e) { - done() - } - }) - }) - - describe('setBucketVersioning(bucket, versionConfig, callback)', () => { - it('should fail on null bucket', (done) => { - try { - client.setBucketVersioning(null, {}, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty 
bucket', (done) => { - try { - client.setBucketVersioning('', {}, function () {}) - } catch (e) { - done() - } - }) - - it('should fail on empty versionConfig', (done) => { - try { - client.setBucketVersioning('', null, function () {}) - } catch (e) { - done() - } - }) - }) - }) - - describe('Bucket and Object Tags APIs', () => { - describe('Set Bucket Tags ', () => { - it('should fail on null bucket', (done) => { - try { - client.setBucketTagging(null, {}, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.setBucketTagging('', {}, function () {}) - } catch (e) { - done() - } - }) - it('should fail if tags are more than 50', (done) => { - const _50_plus_key_tags = {} - for (let i = 0; i < 51; i += 1) { - _50_plus_key_tags[i] = i - } - try { - client.setBucketTagging('', _50_plus_key_tags, function () {}) - } catch (e) { - done() - } - }) - }) - describe('Get Bucket Tags', () => { - it('should fail on invalid bucket', (done) => { - try { - client.getBucketTagging('nv', null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on null bucket', (done) => { - try { - client.getBucketTagging(null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.getBucketTagging('', function () {}) - } catch (e) { - done() - } - }) - }) - describe('Remove Bucket Tags', () => { - it('should fail on null object', (done) => { - try { - client.removeBucketTagging(null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.removeBucketTagging('', function () {}) - } catch (e) { - done() - } - }) - it('should fail on invalid bucket name', (done) => { - try { - client.removeBucketTagging('198.51.100.24', function () {}) - } catch (e) { - done() - } - }) - - it('should fail on invalid bucket name', (done) => { - try { - client.removeBucketTagging('xy', function () {}) - } catch (e) { - 
done() - } - }) - }) - describe('Put Object Tags', () => { - it('should fail on null object', (done) => { - try { - client.putObjectTagging('my-bucket-name', null, {}, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty object', (done) => { - try { - client.putObjectTagging('my-bucket-name', null, {}, function () {}) - } catch (e) { - done() - } - }) - it('should fail on non object tags', (done) => { - try { - client.putObjectTagging('my-bucket-name', null, 'non-obj-tag', function () {}) - } catch (e) { - done() - } - }) - it('should fail if tags are more than 50 on an object', (done) => { - const _50_plus_key_tags = {} - for (let i = 0; i < 51; i += 1) { - _50_plus_key_tags[i] = i - } - try { - client.putObjectTagging('my-bucket-name', null, _50_plus_key_tags, function () {}) - } catch (e) { - done() - } - }) - }) - describe('Get Object Tags', () => { - it('should fail on invalid bucket', (done) => { - try { - client.getObjectTagging('nv', null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on null object', (done) => { - try { - client.getObjectTagging('my-bucket-name', null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty object', (done) => { - try { - client.getObjectTagging('my-bucket-name', null, function () {}) - } catch (e) { - done() - } - }) - }) - describe('Remove Object Tags', () => { - it('should fail on null object', (done) => { - try { - client.removeObjectTagging('my-bucket', null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.removeObjectTagging('my-bucket', '', function () {}) - } catch (e) { - done() - } - }) - it('should fail on invalid bucket name', (done) => { - try { - client.removeObjectTagging('198.51.100.24', function () {}) - } catch (e) { - done() - } - }) - - it('should fail on invalid bucket name', (done) => { - try { - client.removeObjectTagging('xy', function () {}) - } catch (e) { - done() - 
} - }) - }) - }) - - describe('setBucketLifecycle(bucket, lifecycleConfig, callback)', () => { - it('should fail on null bucket', (done) => { - try { - client.setBucketLifecycle(null, null, function () {}) - } catch (e) { - done() - } - }) - - it('should fail on empty bucket', (done) => { - try { - client.setBucketLifecycle('', null, function () {}) - } catch (e) { - done() - } - }) - }) - - describe('getBucketLifecycle(bucket, callback)', () => { - it('should fail on null bucket', (done) => { - try { - client.getBucketLifecycle(null, function () {}) - } catch (e) { - done() - } - }) - - it('should fail on empty bucket', (done) => { - try { - client.getBucketLifecycle('', function () {}) - } catch (e) { - done() - } - }) - }) - describe('removeBucketLifecycle(bucket, callback)', () => { - it('should fail on null bucket', (done) => { - try { - client.removeBucketLifecycle(null, null, function () {}) - } catch (e) { - done() - } - }) - - it('should fail on empty bucket', (done) => { - try { - client.removeBucketLifecycle('', null, function () {}) - } catch (e) { - done() - } - }) - }) - - describe('Object Locking APIs', () => { - describe('getObjectLockConfig(bucket, callback)', () => { - it('should fail on null bucket', (done) => { - try { - client.getObjectLockConfig(null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.getObjectLockConfig('', function () {}) - } catch (e) { - done() - } - }) - }) - - describe('setObjectLockConfig(bucket, lockConfig, callback)', () => { - it('should fail on null bucket', (done) => { - try { - client.setObjectLockConfig(null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.setObjectLockConfig('', function () {}) - } catch (e) { - done() - } - }) - it('should fail on passing invalid mode ', (done) => { - try { - client.setObjectLockConfig('my-bucket', { mode: 'invalid_mode' }, function () {}) - } catch 
(e) { - done() - } - }) - it('should fail on passing invalid unit ', (done) => { - try { - client.setObjectLockConfig('my-bucket', { mode: 'COMPLIANCE', unit: 'invalid_unit' }, function () {}) - } catch (e) { - done() - } - }) - it('should fail on passing invalid validity ', (done) => { - try { - client.setObjectLockConfig( - 'my-bucket', - { mode: 'COMPLIANCE', unit: 'invalid_unit', validity: '' }, - function () {}, - ) - } catch (e) { - done() - } - }) - it('should fail on passing invalid config ', (done) => { - try { - client.setObjectLockConfig( - 'my-bucket', - { mode: 'COMPLIANCE', randomProp: true, nonExisting: false }, - function () {}, - ) - } catch (e) { - done() - } - }) - }) - }) - - describe('Object retention APIs', () => { - describe('getObjectRetention(bucket, objectName, getRetentionOpts,callback)', () => { - it('should fail on null bucket', (done) => { - try { - client.getObjectRetention(null, '', '', function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.getObjectRetention('', '', '', function () {}) - } catch (e) { - done() - } - }) - it('should fail on invalid object name', (done) => { - try { - client.getObjectRetention('my-bucket', null, '', function () {}) - } catch (e) { - done() - } - }) - it('should fail on invalid versionId', (done) => { - try { - client.getObjectRetention('my-bucket', 'objectname', { versionId: 123 }, function () {}) - } catch (e) { - done() - } - }) - }) - - describe('putObjectRetention(bucket, objectName, retentionConfig, callback)', () => { - it('should fail on null bucket', (done) => { - try { - client.putObjectRetention(null, '', {}, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.putObjectRetention('', '', {}, function () {}) - } catch (e) { - done() - } - }) - - it('should fail on null object', (done) => { - try { - client.putObjectRetention('my-bucket', null, {}, function () {}) - } catch 
(e) { - done() - } - }) - it('should fail on empty object', (done) => { - try { - client.putObjectRetention('my-bucket', '', {}, function () {}) - } catch (e) { - done() - } - }) - it('should fail on passing invalid mode ', (done) => { - try { - client.putObjectRetention('my-bucket', 'my-object', { mode: 'invalid_mode' }, function () {}) - } catch (e) { - done() - } - }) - it('should fail on passing invalid governanceBypass ', (done) => { - try { - client.putObjectRetention('my-bucket', 'my-object', { governanceBypass: 'nonbool' }, function () {}) - } catch (e) { - done() - } - }) - it('should fail on passing invalid (null) retainUntilDate ', (done) => { - try { - client.putObjectRetention('my-bucket', 'my-object', { retainUntilDate: 12345 }, function () {}) - } catch (e) { - done() - } - }) - it('should fail on passing invalid versionId ', (done) => { - try { - client.putObjectRetention('my-bucket', { versionId: 'COMPLIANCE' }, function () {}) - } catch (e) { - done() - } - }) - }) - }) - - describe('Bucket Encryption APIs', () => { - describe('setBucketEncryption(bucket, encryptionConfig, callback)', () => { - it('should fail on null bucket', (done) => { - try { - client.setBucketEncryption(null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.setBucketEncryption('', function () {}) - } catch (e) { - done() - } - }) - it('should fail on multiple rules', (done) => { - try { - client.setBucketEncryption( - 'my-bucket', - { - // Default Rule - Rule: [ - { - ApplyServerSideEncryptionByDefault: { - SSEAlgorithm: 'AES256', - }, - }, - { - ApplyServerSideEncryptionByDefault: { - SSEAlgorithm: 'AES256', - }, - }, - ], - }, - function () {}, - ) - } catch (e) { - done() - } - }) - }) - - describe('getBucketEncryption(bucket, callback)', () => { - it('should fail on null bucket', (done) => { - try { - client.getBucketEncryption(null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on 
empty bucket', (done) => { - try { - client.getBucketEncryption('', function () {}) - } catch (e) { - done() - } - }) - }) - - describe('removeBucketEncryption(bucket, callback)', () => { - it('should fail on null bucket', (done) => { - try { - client.removeBucketEncryption(null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.removeBucketEncryption('', function () {}) - } catch (e) { - done() - } - }) - }) - }) - describe('Bucket Replication APIs', () => { - describe('setBucketReplication(bucketName, replicationConfig, callback)', () => { - it('should fail on null bucket', (done) => { - try { - client.setBucketReplication(null, {}, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.setBucketReplication('', {}, function () {}) - } catch (e) { - done() - } - }) - - it('should fail on empty replicationConfig', (done) => { - try { - client.setBucketReplication('my-bucket', {}, function () {}) - } catch (e) { - done() - } - }) - - it('should fail on empty replicationConfig role', (done) => { - try { - client.setBucketReplication('my-bucket', { role: '' }, function () {}) - } catch (e) { - done() - } - }) - - it('should fail on invalid value for replicationConfig role', (done) => { - try { - client.setBucketReplication('my-bucket', { role: 12 }, function () {}) - } catch (e) { - done() - } - }) - - it('should fail on empty value for replicationConfig rules', (done) => { - try { - client.setBucketReplication('my-bucket', { role: 'arn:', rules: [] }, function () {}) - } catch (e) { - done() - } - }) - it('should fail on null value for replicationConfig rules', (done) => { - try { - client.setBucketReplication('my-bucket', { role: 'arn:', rules: null }, function () {}) - } catch (e) { - done() - } - }) - }) - - describe('getBucketReplication(bucketName, callback)', () => { - it('should fail on null bucket', (done) => { - try { - 
client.getBucketReplication(null, {}, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.getBucketReplication('', {}, function () {}) - } catch (e) { - done() - } - }) - }) - - describe('removeBucketReplication(bucketName, callback)', () => { - it('should fail on null bucket', (done) => { - try { - client.removeBucketReplication(null, {}, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.removeBucketReplication('', {}, function () {}) - } catch (e) { - done() - } - }) - }) - }) - - describe('Object Legal Hold APIs', () => { - describe('getObjectLegalHold(bucketName, objectName, getOpts={}, cb)', () => { - it('should fail on null bucket', (done) => { - try { - client.getObjectLegalHold(null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.getObjectLegalHold('', function () {}) - } catch (e) { - done() - } - }) - - it('should fail on null objectName', (done) => { - try { - client.getObjectLegalHold('my-bucket', null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on null getOpts', (done) => { - try { - client.getObjectLegalHold('my-bucker', 'my-object', null, function () {}) - } catch (e) { - done() - } - }) - }) - - describe('setObjectLegalHold(bucketName, objectName, setOpts={}, cb)', () => { - it('should fail on null bucket', (done) => { - try { - client.setObjectLegalHold(null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.setObjectLegalHold('', function () {}) - } catch (e) { - done() - } - }) - - it('should fail on null objectName', (done) => { - try { - client.setObjectLegalHold('my-bucket', null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on null setOpts', (done) => { - try { - client.setObjectLegalHold('my-bucker', 'my-object', null, function () {}) - } 
catch (e) { - done() - } - }) - it('should fail on empty versionId', (done) => { - try { - client.setObjectLegalHold('my-bucker', 'my-object', {}, function () {}) - } catch (e) { - done() - } - }) - }) - }) - - describe('Compose Object APIs', () => { - describe('composeObject(destObjConfig, sourceObjectList,cb)', () => { - it('should fail on null destination config', (done) => { - try { - client.composeObject(null, function () {}) - } catch (e) { - done() - } - }) - - it('should fail on no array source config', (done) => { - try { - const destOptions = new CopyDestinationOptions({ Bucket: 'test-bucket', Object: 'test-object' }) - client.composeObject(destOptions, 'non-array', function () {}) - } catch (e) { - done() - } - }) - - it('should fail on null source config', (done) => { - try { - const destOptions = new CopyDestinationOptions({ Bucket: 'test-bucket', Object: 'test-object' }) - client.composeObject(destOptions, null, function () {}) - } catch (e) { - done() - } - }) - }) - }) - describe('Select Object Content APIs', () => { - describe('selectObjectContent(bucketName, objectName, selectOpts={}, cb)', () => { - it('should fail on null bucket', (done) => { - try { - client.selectObjectContent(null, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty bucket', (done) => { - try { - client.selectObjectContent('', function () {}) - } catch (e) { - done() - } - }) - - it('should fail on empty object', (done) => { - try { - client.selectObjectContent('my-bucket', '', function () {}) - } catch (e) { - done() - } - }) - it('should fail on null object', (done) => { - try { - client.selectObjectContent('my-bucket', null, function () {}) - } catch (e) { - done() - } - }) - }) - }) -}) - -describe('IP Address Validations', () => { - it('should validate for valid ip', () => { - assert.equal(isValidIP('1.1.1.1'), true) - }) - - it('Check list of IPV4 Invalid addresses', () => { - const invalidIpv4 = [ - ' 127.0.0.1', - '127.0.0.1 ', - '127.0.0.1 
127.0.0.1', - '127.0.0.256', - '127.0.0.1//1', - '127.0.0.1/0x1', - '127.0.0.1/-1', - '127.0.0.1/ab', - '127.0.0.1/', - '127.0.0.256/32', - '127.0.0.1/33', - ] - invalidIpv4.map((ip) => { - assert.equal(isValidIP(ip), false) - }) - }) - - it('Check list of IPV4 Valid addresses', () => { - const validIpv4 = ['001.002.003.004', '127.0.0.1', '255.255.255.255', '192.168.1.10'] - validIpv4.map((ip) => { - assert.equal(isValidIP(ip), true) - }) - }) - - it('Check list of IPV6 Invalid addresses', () => { - const invalidIpV6 = [ - "':10.0.0.1", - '-1', - '::1 ::1', - '1.2.3.4:1111:2222:3333:4444::5555', - '1.2.3.4:1111:2222:3333::5555', - '1.2.3.4:1111:2222::5555', - '1.2.3.4:1111::5555', - '1.2.3.4::', - '1.2.3.4::5555', - '11112222:3333:4444:5555:6666:1.2.3.4', - '11112222:3333:4444:5555:6666:7777:8888', - '::1//64', - '::1/0001', - '1111:', - '1111:1.2.3.4', - '1111:2222', - '1111:22223333:4444:5555:6666:1.2.3.4', - '1111:22223333:4444:5555:6666:7777:8888', - '1111:2222:', - '1111:2222:1.2.3.4', - '1111:2222:3333', - '1111:2222:33334444:5555:6666:1.2.3.4', - '1111:2222:33334444:5555:6666:7777:8888', - '1111:2222:3333:', - '1111:2222:3333:1.2.3.4', - '1111:2222:3333:4444', - '1111:2222:3333:44445555:6666:1.2.3.4', - '1111:2222:3333:44445555:6666:7777:8888', - '1111:2222:3333:4444:', - '1111:2222:3333:4444:1.2.3.4', - '1111:2222:3333:4444:5555', - '1111:2222:3333:4444:55556666:1.2.3.4', - '1111:2222:3333:4444:55556666:7777:8888', - '1111:2222:3333:4444:5555:', - '1111:2222:3333:4444:5555:1.2.3.4', - '1111:2222:3333:4444:5555:6666', - '1111:2222:3333:4444:5555:66661.2.3.4', - '1111:2222:3333:4444:5555:66667777:8888', - '1111:2222:3333:4444:5555:6666:', - '1111:2222:3333:4444:5555:6666:1.2.3.4.5', - '1111:2222:3333:4444:5555:6666:255.255.255255', - '1111:2222:3333:4444:5555:6666:255.255255.255', - '1111:2222:3333:4444:5555:6666:255255.255.255', - '1111:2222:3333:4444:5555:6666:256.256.256.256', - '1111:2222:3333:4444:5555:6666:7777', - 
'1111:2222:3333:4444:5555:6666:77778888', - '1111:2222:3333:4444:5555:6666:7777:', - '1111:2222:3333:4444:5555:6666:7777:1.2.3.4', - '1111:2222:3333:4444:5555:6666:7777:::', - '1111:2222:3333:4444:5555:6666::8888:', - '1111:2222:3333:4444:5555:6666:::', - '1111:2222:3333:4444:5555:6666:::8888', - '1111:2222:3333:4444:5555::7777:8888:', - '1111:2222:3333:4444:5555::7777::', - '1111:2222:3333:4444:5555::8888:', - '1111:2222:3333:4444:5555:::', - '1111:2222:3333:4444:5555:::1.2.3.4', - '1111:2222:3333:4444:5555:::7777:8888', - '1111:2222:3333:4444::5555:', - '1111:2222:3333:4444::6666:7777:8888:', - '1111:2222:3333:4444::6666:7777::', - '1111:2222:3333:4444::6666::8888', - '1111:2222:3333:4444::7777:8888:', - '1111:2222:3333:4444::8888:', - '1111:2222:3333:4444:::', - '1111:2222:3333:4444:::6666:1.2.3.4', - '1111:2222:3333:4444:::6666:7777:8888', - '1111:2222:3333::5555:', - '1111:2222:3333::5555:6666:7777:8888:', - '1111:2222:3333::5555:6666:7777::', - '1111:2222:3333::5555:6666::8888', - '1111:2222:3333::5555::1.2.3.4', - '1111:2222:3333::5555::7777:8888', - '1111:2222:3333::6666:7777:8888:', - '1111:2222:3333::7777:8888:', - '1111:2222:3333::8888:', - '1111:2222:3333:::', - '1111:2222:3333:::5555:6666:1.2.3.4', - '1111:2222:3333:::5555:6666:7777:8888', - '1111:2222::4444:5555:6666:7777:8888:', - '1111:2222::4444:5555:6666:7777::', - '1111:2222::4444:5555:6666::8888', - '1111:2222::4444:5555::1.2.3.4', - '1111:2222::4444:5555::7777:8888', - '1111:2222::4444::6666:1.2.3.4', - '1111:2222::4444::6666:7777:8888', - '1111:2222::5555:', - '1111:2222::5555:6666:7777:8888:', - '1111:2222::6666:7777:8888:', - '1111:2222::7777:8888:', - '1111:2222::8888:', - '1111:2222:::', - '1111:2222:::4444:5555:6666:1.2.3.4', - '1111:2222:::4444:5555:6666:7777:8888', - '1111::3333:4444:5555:6666:7777:8888:', - '1111::3333:4444:5555:6666:7777::', - '1111::3333:4444:5555:6666::8888', - '1111::3333:4444:5555::1.2.3.4', - '1111::3333:4444:5555::7777:8888', - '1111::3333:4444::6666:1.2.3.4', - 
'1111::3333:4444::6666:7777:8888', - '1111::3333::5555:6666:1.2.3.4', - '1111::3333::5555:6666:7777:8888', - '1111::4444:5555:6666:7777:8888:', - '1111::5555:', - '1111::5555:6666:7777:8888:', - '1111::6666:7777:8888:', - '1111::7777:8888:', - '1111::8888:', - '1111:::', - '1111:::3333:4444:5555:6666:1.2.3.4', - '1111:::3333:4444:5555:6666:7777:8888', - '12345::6:7:8', - '124.15.6.89/60', - '1:2:3:4:5:6:7:8:9', - '1:2:3::4:5:6:7:8:9', - '1:2:3::4:5::7:8', - '1::1.2.256.4', - '1::1.2.3.256', - '1::1.2.3.300', - '1::1.2.3.900', - '1::1.2.300.4', - '1::1.2.900.4', - '1::1.256.3.4', - '1::1.300.3.4', - '1::1.900.3.4', - '1::256.2.3.4', - '1::260.2.3.4', - '1::2::3', - '1::300.2.3.4', - '1::300.300.300.300', - '1::3000.30.30.30', - '1::400.2.3.4', - '1::5:1.2.256.4', - '1::5:1.2.3.256', - '1::5:1.2.3.300', - '1::5:1.2.3.900', - '1::5:1.2.300.4', - '1::5:1.2.900.4', - '1::5:1.256.3.4', - '1::5:1.300.3.4', - '1::5:1.900.3.4', - '1::5:256.2.3.4', - '1::5:260.2.3.4', - '1::5:300.2.3.4', - '1::5:300.300.300.300', - '1::5:3000.30.30.30', - '1::5:400.2.3.4', - '1::5:900.2.3.4', - '1::900.2.3.4', - '1:::3:4:5', - '2001:0000:1234: 0000:0000:C1C0:ABCD:0876', - '2001:0000:1234:0000:0000:C1C0:ABCD:0876 0', - '2001:1:1:1:1:1:255Z255X255Y255', - '2001::FFD3::57ab', - '2001:DB8:0:0:8:800:200C:417A:221', - '2001:db8:85a3::8a2e:37023:7334', - '2001:db8:85a3::8a2e:370k:7334', - '3ffe:0b00:0000:0001:0000:0000:000a', - '3ffe:b00::1::a', - ':', - ':1.2.3.4', - ':1111:2222:3333:4444:5555:6666:1.2.3.4', - ':1111:2222:3333:4444:5555:6666:7777:8888', - ':1111:2222:3333:4444:5555:6666:7777::', - ':1111:2222:3333:4444:5555:6666::', - ':1111:2222:3333:4444:5555:6666::8888', - ':1111:2222:3333:4444:5555::', - ':1111:2222:3333:4444:5555::1.2.3.4', - ':1111:2222:3333:4444:5555::7777:8888', - ':1111:2222:3333:4444:5555::8888', - ':1111:2222:3333:4444::', - ':1111:2222:3333:4444::1.2.3.4', - ':1111:2222:3333:4444::5555', - ':1111:2222:3333:4444::6666:1.2.3.4', - ':1111:2222:3333:4444::6666:7777:8888', 
- ':1111:2222:3333:4444::7777:8888', - ':1111:2222:3333:4444::8888', - ':1111:2222:3333::', - ':1111:2222:3333::1.2.3.4', - ':1111:2222:3333::5555', - ':1111:2222:3333::5555:6666:1.2.3.4', - ':1111:2222:3333::5555:6666:7777:8888', - ':1111:2222:3333::6666:1.2.3.4', - ':1111:2222:3333::6666:7777:8888', - ':1111:2222:3333::7777:8888', - ':1111:2222:3333::8888', - ':1111:2222::', - ':1111:2222::1.2.3.4', - ':1111:2222::4444:5555:6666:1.2.3.4', - ':1111:2222::4444:5555:6666:7777:8888', - ':1111:2222::5555', - ':1111:2222::5555:6666:1.2.3.4', - ':1111:2222::5555:6666:7777:8888', - ':1111:2222::6666:1.2.3.4', - ':1111:2222::6666:7777:8888', - ':1111:2222::7777:8888', - ':1111:2222::8888', - ':1111::', - ':1111::1.2.3.4', - ':1111::3333:4444:5555:6666:1.2.3.4', - ':1111::3333:4444:5555:6666:7777:8888', - ':1111::4444:5555:6666:1.2.3.4', - ':1111::4444:5555:6666:7777:8888', - ':1111::5555', - ':1111::5555:6666:1.2.3.4', - ':1111::5555:6666:7777:8888', - ':1111::6666:1.2.3.4', - ':1111::6666:7777:8888', - ':1111::7777:8888', - ':1111::8888', - ':2222:3333:4444:5555:6666:1.2.3.4', - ':2222:3333:4444:5555:6666:7777:8888', - ':3333:4444:5555:6666:1.2.3.4', - ':3333:4444:5555:6666:7777:8888', - ':4444:5555:6666:1.2.3.4', - ':4444:5555:6666:7777:8888', - ':5555:6666:1.2.3.4', - ':5555:6666:7777:8888', - ':6666:1.2.3.4', - ':6666:7777:8888', - ':7777:8888', - ':8888', - '::-1', - '::.', - '::..', - '::...', - '::...4', - '::..3.', - '::..3.4', - '::.2..', - '::.2.3.', - '::.2.3.4', - '::1...', - '::1.2..', - '::1.2.256.4', - '::1.2.3.', - '::1.2.3.256', - '::1.2.3.300', - '::1.2.3.900', - '::1.2.300.4', - '::1.2.900.4', - '::1.256.3.4', - '::1.300.3.4', - '::1.900.3.4', - '::1111:2222:3333:4444:5555:6666::', - '::2222:3333:4444:5555:6666:7777:8888:', - '::2222:3333:4444:5555:7777:8888::', - '::2222:3333:4444:5555:7777::8888', - '::2222:3333:4444:5555::1.2.3.4', - '::2222:3333:4444:5555::7777:8888', - '::2222:3333:4444::6666:1.2.3.4', - '::2222:3333:4444::6666:7777:8888', - 
'::2222:3333::5555:6666:1.2.3.4', - '::2222:3333::5555:6666:7777:8888', - '::2222::4444:5555:6666:1.2.3.4', - '::2222::4444:5555:6666:7777:8888', - '::256.2.3.4', - '::260.2.3.4', - '::300.2.3.4', - '::300.300.300.300', - '::3000.30.30.30', - '::3333:4444:5555:6666:7777:8888:', - '::400.2.3.4', - '::4444:5555:6666:7777:8888:', - '::5555:', - '::5555:6666:7777:8888:', - '::6666:7777:8888:', - '::7777:8888:', - '::8888:', - '::900.2.3.4', - ':::', - ':::1.2.3.4', - ':::2222:3333:4444:5555:6666:1.2.3.4', - ':::2222:3333:4444:5555:6666:7777:8888', - ':::3333:4444:5555:6666:7777:8888', - ':::4444:5555:6666:1.2.3.4', - ':::4444:5555:6666:7777:8888', - ':::5555', - ':::5555:6666:1.2.3.4', - ':::5555:6666:7777:8888', - ':::6666:1.2.3.4', - ':::6666:7777:8888', - ':::7777:8888', - ':::8888', - '::ffff:192x168.1.26', - '::ffff:2.3.4', - '::ffff:257.1.2.3', - 'FF01::101::2', - 'FF02:0000:0000:0000:0000:0000:0000:0000:0001', - 'XXXX:XXXX:XXXX:XXXX:XXXX:XXXX:1.2.3.4', - 'XXXX:XXXX:XXXX:XXXX:XXXX:XXXX:XXXX:XXXX', - 'a::b::c', - 'a::g', - 'a:a:a:a:a:a:a:a:a', - 'a:aaaaa::', - 'a:b', - 'a:b:c:d:e:f:g:0', - 'ffff:', - 'ffff::ffff::ffff', - 'ffgg:ffff:ffff:ffff:ffff:ffff:ffff:ffff', - 'ldkfj', - '::/129', - '1000:://32', - '::/', - ] - invalidIpV6.map((ip) => { - const valid = isValidIP(ip) - assert.equal(valid, false) - }) - }) - - it('Check list of IPV6 Valid addresses', () => { - const validIpv6 = [ - '0000:0000:0000:0000:0000:0000:0000:0000', - '0000:0000:0000:0000:0000:0000:0000:0001', - '0:0:0:0:0:0:0:0', - '0:0:0:0:0:0:0:1', - '0:0:0:0:0:0:0::', - '0:0:0:0:0:0:13.1.68.3', - '0:0:0:0:0:0::', - '0:0:0:0:0::', - '0:0:0:0:0:FFFF:129.144.52.38', - '0:0:0:0:1:0:0:0', - '0:0:0:0::', - '0:0:0::', - '0:0::', - '0:1:2:3:4:5:6:7', - '0::', - '0:a:b:c:d:e:f::', - '1080:0:0:0:8:800:200c:417a', - '1080::8:800:200c:417a', - '1111:2222:3333:4444:5555:6666:123.123.123.123', - '1111:2222:3333:4444:5555:6666:7777:8888', - '1111:2222:3333:4444:5555:6666:7777::', - 
'1111:2222:3333:4444:5555:6666::', - '1111:2222:3333:4444:5555:6666::8888', - '1111:2222:3333:4444:5555::', - '1111:2222:3333:4444:5555::7777:8888', - '1111:2222:3333:4444:5555::8888', - '1111:2222:3333:4444::', - '1111:2222:3333:4444::6666:123.123.123.123', - '1111:2222:3333:4444::6666:7777:8888', - '1111:2222:3333:4444::7777:8888', - '1111:2222:3333:4444::8888', - '1111:2222:3333::', - '1111:2222:3333::5555:6666:123.123.123.123', - '1111:2222:3333::5555:6666:7777:8888', - '1111:2222:3333::6666:123.123.123.123', - '1111:2222:3333::6666:7777:8888', - '1111:2222:3333::7777:8888', - '1111:2222:3333::8888', - '1111:2222::', - '1111:2222::4444:5555:6666:123.123.123.123', - '1111:2222::4444:5555:6666:7777:8888', - '1111:2222::5555:6666:123.123.123.123', - '1111:2222::5555:6666:7777:8888', - '1111:2222::6666:123.123.123.123', - '1111:2222::6666:7777:8888', - '1111:2222::7777:8888', - '1111:2222::8888', - '1111::', - '1111::3333:4444:5555:6666:123.123.123.123', - '1111::3333:4444:5555:6666:7777:8888', - '1111::4444:5555:6666:123.123.123.123', - '1111::4444:5555:6666:7777:8888', - '1111::5555:6666:123.123.123.123', - '1111::5555:6666:7777:8888', - '1111::6666:123.123.123.123', - '1111::6666:7777:8888', - '1111::7777:8888', - '1111::8888', - '1:2:3:4:5:6:1.2.3.4', - '1:2:3:4:5:6:7:8', - '1:2:3:4:5:6::', - '1:2:3:4:5:6::8', - '1:2:3:4:5::', - '1:2:3:4:5::7:8', - '1:2:3:4:5::8', - '1:2:3:4::', - '1:2:3:4::5:1.2.3.4', - '1:2:3:4::7:8', - '1:2:3:4::8', - '1:2:3::', - '1:2:3::5:1.2.3.4', - '1:2:3::7:8', - '1:2:3::8', - '1:2::', - '1:2::5:1.2.3.4', - '1:2::7:8', - '1:2::8', - '1::', - '1::2:3', - '1::2:3:4', - '1::2:3:4:5', - '1::2:3:4:5:6', - '1::2:3:4:5:6:7', - '1::5:1.2.3.4', - '1::5:11.22.33.44', - '1::7:8', - '1::8', - '2001:0000:1234:0000:0000:C1C0:ABCD:0876', - '2001:0000:4136:e378:8000:63bf:3fff:fdd2', - '2001:0db8:0000:0000:0000:0000:1428:57ab', - '2001:0db8:0000:0000:0000::1428:57ab', - '2001:0db8:0:0:0:0:1428:57ab', - '2001:0db8:0:0::1428:57ab', - 
'2001:0db8:1234:0000:0000:0000:0000:0000', - '2001:0db8:1234::', - '2001:0db8:1234:ffff:ffff:ffff:ffff:ffff', - '2001:0db8:85a3:0000:0000:8a2e:0370:7334', - '2001:0db8::1428:57ab', - '2001::CE49:7601:2CAD:DFFF:7C94:FFFE', - '2001::CE49:7601:E866:EFFF:62C3:FFFE', - '2001:DB8:0:0:8:800:200C:417A', - '2001:DB8::8:800:200C:417A', - '2001:db8:85a3:0:0:8a2e:370:7334', - '2001:db8:85a3::8a2e:370:7334', - '2001:db8::', - '2001:db8::1428:57ab', - '2001:db8:a::123', - '2002::', - '2608::3:5', - '2608:af09:30:0:0:0:0:134', - '2608:af09:30::102a:7b91:c239:baff', - '2::10', - '3ffe:0b00:0000:0000:0001:0000:0000:000a', - '7:6:5:4:3:2:1:0', - '::', - '::0', - '::0:0', - '::0:0:0', - '::0:0:0:0', - '::0:0:0:0:0', - '::0:0:0:0:0:0', - '::0:0:0:0:0:0:0', - '::0:a:b:c:d:e:f', - '::1', - '::123.123.123.123', - '::13.1.68.3', - '::2222:3333:4444:5555:6666:123.123.123.123', - '::2222:3333:4444:5555:6666:7777:8888', - '::2:3', - '::2:3:4', - '::2:3:4:5', - '::2:3:4:5:6', - '::2:3:4:5:6:7', - '::2:3:4:5:6:7:8', - '::3333:4444:5555:6666:7777:8888', - '::4444:5555:6666:123.123.123.123', - '::4444:5555:6666:7777:8888', - '::5555:6666:123.123.123.123', - '::5555:6666:7777:8888', - '::6666:123.123.123.123', - '::6666:7777:8888', - '::7777:8888', - '::8', - '::8888', - '::FFFF:129.144.52.38', - '::ffff:0:0', - '::ffff:0c22:384e', - '::ffff:12.34.56.78', - '::ffff:192.0.2.128', - '::ffff:192.168.1.1', - '::ffff:192.168.1.26', - '::ffff:c000:280', - 'FF01:0:0:0:0:0:0:101', - 'FF01::101', - 'FF02:0000:0000:0000:0000:0000:0000:0001', - 'a:b:c:d:e:f:0::', - 'fe80:0000:0000:0000:0204:61ff:fe9d:f156', - 'fe80:0:0:0:204:61ff:254.157.241.86', - 'fe80:0:0:0:204:61ff:fe9d:f156', - 'fe80::', - 'fe80::1', - 'fe80::204:61ff:254.157.241.86', - 'fe80::204:61ff:fe9d:f156', - 'fe80::217:f2ff:254.7.237.98', - 'fe80::217:f2ff:fe07:ed62', - 'fedc:ba98:7654:3210:fedc:ba98:7654:3210', - 'ff02::1', - 'ffff::', - 'ffff::3:5', - 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', - 'a:0::0:b', - 'a:0:0::0:b', - 'a:0::0:0:b', - 
'a::0:0:b', - 'a::0:b', - 'a:0::b', - 'a:0:0::b', - ] - validIpv6.map((ip) => { - const valid = isValidIP(ip) - assert.equal(valid, true) - }) - }) -}) From c21c33580d7996241cfc6f21a1f47853cf063895 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 14:33:42 +0800 Subject: [PATCH 03/78] fix lint --- .eslintrc.js | 7 ++++ gulpfile.js | 111 --------------------------------------------------- 2 files changed, 7 insertions(+), 111 deletions(-) delete mode 100644 gulpfile.js diff --git a/.eslintrc.js b/.eslintrc.js index 767ceb9b..a5bb022c 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -116,5 +116,12 @@ module.exports = { '@typescript-eslint/no-empty-function': 0, }, }, + { + files: ['./types/**/*'], + rules: { + '@typescript-eslint/no-unused-vars': 0, + '@typescript-eslint/no-explicit-any': 0, + }, + }, ], } diff --git a/gulpfile.js b/gulpfile.js deleted file mode 100644 index d293aa82..00000000 --- a/gulpfile.js +++ /dev/null @@ -1,111 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -const babel = require('gulp-babel') -const gulp = require('gulp') -const gulpIf = require('gulp-if') -const sourcemaps = require('gulp-sourcemaps') - -const fs = require('fs') -const browserify = require('browserify') -const mocha = require('gulp-mocha') -const eslint = require('gulp-eslint') - -const compileJS = (src, dest) => { - return gulp - .src(src) - .pipe(sourcemaps.init()) - .pipe( - babel({ - presets: [ - [ - '@babel/env', - { - targets: { node: 8 }, - }, - ], - ], - }), - ) - .pipe(sourcemaps.write('.')) - .pipe(gulp.dest(dest)) -} - -const compile = () => compileJS('src/main/**/*.js', 'dist/main') -const testCompile = gulp.series(compile, () => { - return compileJS('src/test/**/*.js', 'dist/test') -}) - -exports.browserify = gulp.series(compile, () => { - return browserify('./dist/main/minio.js', { - standalone: 'MinIO', - }) - .bundle() - .on('error', (err) => { - // eslint-disable-next-line no-console - console.log('Error : ' + err.message) - }) - .pipe(fs.createWriteStream('./dist/main/minio-browser.js')) -}) - -exports.test = gulp.series(testCompile, () => { - return gulp - .src('dist/test/**/*.js', { - read: false, - }) - .pipe( - mocha({ - exit: true, - reporter: 'spec', - ui: 'bdd', - }), - ) -}) - -function isFixed(file) { - return file.eslint != null && file.eslint.fixed -} - -exports.lint = () => { - const hasFixFlag = process.argv.slice(2).includes('--fix') - return ( - gulp - .src(['src/**/*.js', 'gulpfile.js']) - .pipe(eslint({ fix: hasFixFlag })) - .pipe(eslint.format()) - .pipe(eslint.failAfterError()) - // if fixed, write the file to dest - .pipe(gulpIf(isFixed, gulp.dest('src/'))) - ) -} - -exports.functionalTest = gulp.series(testCompile, () => { - return gulp - .src('dist/test/functional/*.js', { - read: false, - }) - .pipe( - mocha({ - exit: true, - reporter: 'spec', - ui: 'bdd', - }), - ) -}) - -exports.compile = compile -exports.testCompile = testCompile -exports.default = gulp.series(exports.test, exports.browserify) From 
9844650d9865210dce6aeaba75795b7bb148a0b3 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 14:37:54 +0800 Subject: [PATCH 04/78] run on nodejs20 --- .github/workflows/nodejs-windows.yml | 2 +- .github/workflows/nodejs.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/nodejs-windows.yml b/.github/workflows/nodejs-windows.yml index 85ee8856..51abb81c 100644 --- a/.github/workflows/nodejs-windows.yml +++ b/.github/workflows/nodejs-windows.yml @@ -15,7 +15,7 @@ jobs: strategy: max-parallel: 3 matrix: - node_version: [12.x, 14.x, 16.x, 17.x, 18.x, 19.x] + node_version: [12.x, 14.x, 16.x, 17.x, 18.x, 20.x] os: [windows-latest] steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/nodejs.yml b/.github/workflows/nodejs.yml index f85c289a..7c1c8db4 100644 --- a/.github/workflows/nodejs.yml +++ b/.github/workflows/nodejs.yml @@ -15,7 +15,7 @@ jobs: strategy: max-parallel: 3 matrix: - node_version: [12.x, 14.x, 16.x, 17.x, 18.x, 19.x] + node_version: [12.x, 14.x, 16.x, 17.x, 18.x, 20.x] os: [ubuntu-latest] steps: - uses: actions/checkout@v3 From c03390b3934232c5b8774bdc9881c7050ab32f63 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 16:21:20 +0800 Subject: [PATCH 05/78] fix ci --- src/AssumeRoleProvider.js | 4 +++- src/CredentialProvider.js | 3 ++- src/Credentials.js | 3 ++- src/extensions.js | 3 ++- src/object-uploader.js | 3 ++- 5 files changed, 11 insertions(+), 5 deletions(-) diff --git a/src/AssumeRoleProvider.js b/src/AssumeRoleProvider.js index 455d7f52..5ffc795a 100644 --- a/src/AssumeRoleProvider.js +++ b/src/AssumeRoleProvider.js @@ -7,7 +7,7 @@ import { Credentials } from './Credentials.js' import { makeDateLong, parseXml, toSha256 } from './helpers.js' import { signV4ByServiceName } from './signing.js' -class AssumeRoleProvider extends CredentialProvider { +export class AssumeRoleProvider extends CredentialProvider { constructor({ stsEndpoint, accessKey, @@ -215,4 +215,6 @@ class 
AssumeRoleProvider extends CredentialProvider { } } +// deprecated default export, please use named exports. +// keep for backward compatibility. export default AssumeRoleProvider diff --git a/src/CredentialProvider.js b/src/CredentialProvider.js index 3e234034..8af798be 100644 --- a/src/CredentialProvider.js +++ b/src/CredentialProvider.js @@ -46,5 +46,6 @@ export class CredentialProvider { } } -// deprecated, keep for backward compatibility. +// deprecated default export, please use named exports. +// keep for backward compatibility. export default CredentialProvider diff --git a/src/Credentials.js b/src/Credentials.js index 4babcfa2..1eccf453 100644 --- a/src/Credentials.js +++ b/src/Credentials.js @@ -33,5 +33,6 @@ export class Credentials { } } -// deprecated, keep for backward compatibility. +// deprecated default export, please use named exports. +// keep for backward compatibility. export default Credentials diff --git a/src/extensions.js b/src/extensions.js index f0e98957..17184528 100644 --- a/src/extensions.js +++ b/src/extensions.js @@ -171,5 +171,6 @@ export class extensions { } } -// deprecated, keep for backward compatibility. +// deprecated default export, please use named exports. +// keep for backward compatibility. export default extensions diff --git a/src/object-uploader.js b/src/object-uploader.js index 6ac099c9..3b3a73f7 100644 --- a/src/object-uploader.js +++ b/src/object-uploader.js @@ -283,5 +283,6 @@ export class ObjectUploader extends Transform { } } -// deprecated, keep for backward compatibility. +// deprecated default export, please use named exports. +// keep for backward compatibility. 
export default ObjectUploader From dcc41910f073d0f128b2a1b89de84bbc31c488c4 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 17:53:08 +0800 Subject: [PATCH 06/78] eslinl rule diff --- .eslintrc.js | 95 +++++++++++++++++++++++++++------------------------- build.mjs | 1 + 2 files changed, 50 insertions(+), 46 deletions(-) diff --git a/.eslintrc.js b/.eslintrc.js index a5bb022c..68a1d252 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -13,7 +13,7 @@ module.exports = { plugins: ['@typescript-eslint', 'simple-import-sort', 'unused-imports', 'import', 'unicorn'], parserOptions: { sourceType: 'module', - ecmaVersion: 2022, + ecmaVersion: 'latest', }, ignorePatterns: ['examples/**/*', 'dist/**/*'], settings: { @@ -35,50 +35,64 @@ module.exports = { }, }, rules: { + 'no-console': ['error'], + // "no-var": ["error"], + 'comma-dangle': 0, curly: ['error'], + 'prefer-const': 0, + 'no-template-curly-in-string': 'error', + // "quotes": ["error", "double"], + 'comma-spacing': 0, // ["error", { before: false, after: true }], + 'semi-spacing': 0, // ["warn", { before: false, after: true }], + 'space-before-blocks': 0, // ["warn", "always"], + 'switch-colon-spacing': ['warn', { after: true, before: false }], + 'keyword-spacing': 0, // ["warn", { before: true, after: true }], + 'template-curly-spacing': 0, // ["error", "never"], + 'rest-spread-spacing': 0, // ["error", "never"], + 'no-multi-spaces': 0, // ["warn", { ignoreEOLComments: false }], + // import node stdlib as `node:...` // don't worry, babel will remove these prefix. 
'unicorn/prefer-node-protocol': 'error', 'simple-import-sort/imports': 'error', + 'simple-import-sort/exports': 'error', + indent: 'off', + 'linebreak-style': ['error', 'unix'], + semi: ['error', 'never'], + 'spaced-comment': [ + 'error', + 'always', + { + line: { + markers: ['/'], + exceptions: ['-', '+'], + }, + block: { + markers: ['!'], + exceptions: ['*'], + balanced: true, + }, + }, + ], + '@typescript-eslint/no-explicit-any': ['warn'], + + '@typescript-eslint/prefer-optional-chain': 0, // ["warn"], + 'no-empty-function': 0, + '@typescript-eslint/no-empty-function': 0, // ["warn"], + '@typescript-eslint/no-var-requires': 0, + '@typescript-eslint/no-this-alias': 0, + '@typescript-eslint/no-empty-interface': ['warn'], + + '@typescript-eslint/no-array-constructor': ['off'], + + 'no-extra-parens': 0, + '@typescript-eslint/no-extra-parens': 0, }, overrides: [ { files: './src/**/*', excludedFiles: ['tests/*.*'], rules: { - 'no-console': ['error'], - 'prefer-const': 0, - 'no-template-curly-in-string': 'error', - // "quotes": ["error", "double"], - 'comma-spacing': 0, // ["error", { before: false, after: true }], - 'semi-spacing': 0, // ["warn", { before: false, after: true }], - 'space-before-blocks': 0, // ["warn", "always"], - 'switch-colon-spacing': ['warn', { after: true, before: false }], - 'keyword-spacing': 0, // ["warn", { before: true, after: true }], - 'template-curly-spacing': 0, // ["error", "never"], - 'rest-spread-spacing': 0, // ["error", "never"], - 'no-multi-spaces': 0, // ["warn", { ignoreEOLComments: false }], - 'simple-import-sort/exports': 'error', - indent: 'off', - 'linebreak-style': ['error', 'unix'], - semi: ['error', 'never'], - 'spaced-comment': [ - 'error', - 'always', - { - line: { - markers: ['/'], - exceptions: ['-', '+'], - }, - block: { - markers: ['!'], - exceptions: ['*'], - balanced: true, - }, - }, - ], - - 'unused-imports/no-unused-imports': 'error', '@typescript-eslint/consistent-type-imports': [ 'error', { @@ -87,24 +101,13 @@ 
module.exports = { }, ], - '@typescript-eslint/no-explicit-any': ['warn'], - - '@typescript-eslint/prefer-optional-chain': 0, // ["warn"], - '@typescript-eslint/no-empty-function': 0, // ["warn"], - '@typescript-eslint/no-var-requires': 0, - '@typescript-eslint/no-this-alias': 0, - '@typescript-eslint/no-empty-interface': ['warn'], - - '@typescript-eslint/no-array-constructor': ['off'], - - 'no-extra-parens': 0, - '@typescript-eslint/no-extra-parens': 0, 'import/extensions': ['error', 'always'], }, }, { files: ['./src/**/*', './tests/**/*'], rules: { + 'unused-imports/no-unused-imports': 'error', 'import/no-commonjs': 'error', 'import/no-amd': 'error', }, diff --git a/build.mjs b/build.mjs index bba89db1..2e073959 100644 --- a/build.mjs +++ b/build.mjs @@ -1,3 +1,4 @@ +/* eslint-disable no-console */ import { execSync } from 'node:child_process' import * as fs from 'node:fs' import * as fsp from 'node:fs/promises' From 56af6f2d61b5aa57ae79e4da6b15498106e3a55d Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 18:02:08 +0800 Subject: [PATCH 07/78] add npm run build to "prepublishOnly" --- package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index 6368372e..55df7441 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,8 @@ "test": "mocha", "lint": "eslint --ext js,mjs,cjs,ts ./", "lint-fix": "eslint --ext js,mjs,cjs,ts ./ --fix", - "prepublishOnly": "npm test", + "prepublish": "", + "prepublishOnly": "npm test && npm run build", "functional": "mocha tests/functional/functional-tests.js", "format": "prettier -w .", "format-check": "prettier --list-different .", From d6289bea5deae233cbf148adaf4aeb18e1d9906d Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 20:20:43 +0800 Subject: [PATCH 08/78] add eslint rule to check import/export --- .eslintrc.js | 3 +++ src/minio.js | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.eslintrc.js b/.eslintrc.js index 68a1d252..9f58a93d 
100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -87,6 +87,9 @@ module.exports = { 'no-extra-parens': 0, '@typescript-eslint/no-extra-parens': 0, + 'import/namespace': 'error', + 'import/default': 'error', + 'import/named': 'error', }, overrides: [ { diff --git a/src/minio.js b/src/minio.js index 7b3e05eb..2567e254 100644 --- a/src/minio.js +++ b/src/minio.js @@ -923,7 +923,7 @@ export class Client { // * `callback(err)` _function_: callback function is called with non `null` value in case of error removeIncompleteUpload(bucketName, objectName, cb) { if (!isValidBucketName(bucketName)) { - throw new errors.isValidBucketNameError('Invalid bucket name: ' + bucketName) + throw new errors.IsValidBucketNameError('Invalid bucket name: ' + bucketName) } if (!isValidObjectName(objectName)) { throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) From 8edcee8873a5ecd1959431a50051ffe9931c4d58 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 20:27:02 +0800 Subject: [PATCH 09/78] add eslint rule to disable default export --- .eslintrc.js | 2 ++ src/AssumeRoleProvider.js | 1 + src/CredentialProvider.js | 1 + src/Credentials.js | 1 + src/extensions.js | 1 + src/object-uploader.js | 1 + 6 files changed, 7 insertions(+) diff --git a/.eslintrc.js b/.eslintrc.js index 9f58a93d..619182e7 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -90,6 +90,8 @@ module.exports = { 'import/namespace': 'error', 'import/default': 'error', 'import/named': 'error', + // default export confuse esm/cjs interop + 'import/no-default-export': 'error', }, overrides: [ { diff --git a/src/AssumeRoleProvider.js b/src/AssumeRoleProvider.js index 5ffc795a..25411f85 100644 --- a/src/AssumeRoleProvider.js +++ b/src/AssumeRoleProvider.js @@ -217,4 +217,5 @@ export class AssumeRoleProvider extends CredentialProvider { // deprecated default export, please use named exports. // keep for backward compatibility. 
+// eslint-disable-next-line import/no-default-export export default AssumeRoleProvider diff --git a/src/CredentialProvider.js b/src/CredentialProvider.js index 8af798be..c1d35494 100644 --- a/src/CredentialProvider.js +++ b/src/CredentialProvider.js @@ -48,4 +48,5 @@ export class CredentialProvider { // deprecated default export, please use named exports. // keep for backward compatibility. +// eslint-disable-next-line import/no-default-export export default CredentialProvider diff --git a/src/Credentials.js b/src/Credentials.js index 1eccf453..ad99155f 100644 --- a/src/Credentials.js +++ b/src/Credentials.js @@ -35,4 +35,5 @@ export class Credentials { // deprecated default export, please use named exports. // keep for backward compatibility. +// eslint-disable-next-line import/no-default-export export default Credentials diff --git a/src/extensions.js b/src/extensions.js index 17184528..5e04a930 100644 --- a/src/extensions.js +++ b/src/extensions.js @@ -173,4 +173,5 @@ export class extensions { // deprecated default export, please use named exports. // keep for backward compatibility. +// eslint-disable-next-line import/no-default-export export default extensions diff --git a/src/object-uploader.js b/src/object-uploader.js index 3b3a73f7..2fdf6606 100644 --- a/src/object-uploader.js +++ b/src/object-uploader.js @@ -285,4 +285,5 @@ export class ObjectUploader extends Transform { // deprecated default export, please use named exports. // keep for backward compatibility. 
+// eslint-disable-next-line import/no-default-export export default ObjectUploader From e1ad69791f2003f469bdd8aa22c22a5b93781111 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 20:34:19 +0800 Subject: [PATCH 10/78] simplify eslint rule --- .eslintrc.js | 27 ++++++++++----------------- 1 file changed, 10 insertions(+), 17 deletions(-) diff --git a/.eslintrc.js b/.eslintrc.js index 619182e7..97a72d1c 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -92,29 +92,22 @@ module.exports = { 'import/named': 'error', // default export confuse esm/cjs interop 'import/no-default-export': 'error', + 'import/extensions': ['error', 'always'], + '@typescript-eslint/consistent-type-imports': [ + 'error', + { + prefer: 'type-imports', + fixStyle: 'separate-type-imports', + }, + ], + 'unused-imports/no-unused-imports': 'error', + 'import/no-amd': 'error', }, overrides: [ - { - files: './src/**/*', - excludedFiles: ['tests/*.*'], - rules: { - '@typescript-eslint/consistent-type-imports': [ - 'error', - { - prefer: 'type-imports', - fixStyle: 'separate-type-imports', - }, - ], - - 'import/extensions': ['error', 'always'], - }, - }, { files: ['./src/**/*', './tests/**/*'], rules: { - 'unused-imports/no-unused-imports': 'error', 'import/no-commonjs': 'error', - 'import/no-amd': 'error', }, }, { From ad13a82256d107a5be07584a504df237ec16f029 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 20:35:51 +0800 Subject: [PATCH 11/78] Update CONTRIBUTING.md --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 81d1c28f..fc6dff2b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -28,4 +28,4 @@ We are currently migrating from JavaScript to TypeScript, so **All Source should That means only use nodejs `require` in js config file like `.eslintrc.js` You should always fully specify your import path extension, -which means you should write `import * from "errors.ts"` for `errors.ts` file, do not write 
`import "errors.js"`. +which means you should write `import {} from "errors.ts"` for `errors.ts` file, do not write `import {} from "errors.js"`. From 6783ac0015bfa9577bbbb7b583594cce0713cf55 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 21:21:44 +0800 Subject: [PATCH 12/78] tsconfig --- tsconfig.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tsconfig.json b/tsconfig.json index bba8d185..1e3d9328 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -3,17 +3,17 @@ "target": "ESNext", "moduleResolution": "Node", "module": "ESNext", - "strict": false, - "noImplicitAny": false, + "strict": true, + "noImplicitAny": true, "strictNullChecks": true, + "noUncheckedIndexedAccess": true, "allowSyntheticDefaultImports": true, "esModuleInterop": true, "allowImportingTsExtensions": true, "declaration": true, - "declarationMap": true, + "declarationMap": false, "emitDeclarationOnly": true, - "sourceMap": true, - "pretty": true, + "sourceMap": false, "outDir": "./dist/main/", "rootDir": "./src" } From 8944f936c7b45d494dd28223e1c023787dfc2636 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 21:24:08 +0800 Subject: [PATCH 13/78] declarationMap --- tsconfig.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tsconfig.json b/tsconfig.json index 1e3d9328..0bf624d3 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -11,9 +11,9 @@ "esModuleInterop": true, "allowImportingTsExtensions": true, "declaration": true, - "declarationMap": false, + "declarationMap": true, "emitDeclarationOnly": true, - "sourceMap": false, + "sourceMap": true, "outDir": "./dist/main/", "rootDir": "./src" } From 73c2144f008cc93879edd2f84a40ac40902536b2 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 21:34:04 +0800 Subject: [PATCH 14/78] migrate --- ...eRoleProvider.js => AssumeRoleProvider.ts} | 175 +- ...ntialProvider.js => CredentialProvider.ts} | 32 +- src/Credentials.js | 39 - src/Credentials.ts | 47 + src/as-callback.ts 
| 32 + src/async.ts | 21 + src/base-error.ts | 30 - src/client.ts | 2067 +++++++++ src/copyConditions.ts | 37 + src/errors.ts | 106 +- src/{extensions.js => extensions.ts} | 72 +- src/{helpers.js => helpers.ts} | 478 +- src/minio.js | 3987 ----------------- src/minio.ts | 41 + src/{notification.js => notification.ts} | 213 +- src/object-uploader.js | 289 -- src/postPolicy.ts | 104 + src/qs.ts | 7 + src/request.ts | 29 + src/response.ts | 26 + src/{s3-endpoints.js => s3-endpoints.ts} | 12 +- src/{signing.js => signing.ts} | 78 +- src/streamify.ts | 30 + src/transformers.js | 263 -- src/transformers.ts | 161 + src/type.ts | 239 + src/typed-client.ts | 1708 +++++++ src/typed-client2.ts | 941 ++++ src/upload.ts | 0 src/{xml-parsers.js => xml-parsers.ts} | 441 +- tests/functional/functional-tests.js | 285 +- tests/unit/test.js | 81 +- types/minio.d.ts | 775 ---- 33 files changed, 6641 insertions(+), 6205 deletions(-) rename src/{AssumeRoleProvider.js => AssumeRoleProvider.ts} (52%) rename src/{CredentialProvider.js => CredentialProvider.ts} (51%) delete mode 100644 src/Credentials.js create mode 100644 src/Credentials.ts create mode 100644 src/as-callback.ts create mode 100644 src/async.ts delete mode 100644 src/base-error.ts create mode 100644 src/client.ts create mode 100644 src/copyConditions.ts rename src/{extensions.js => extensions.ts} (80%) rename src/{helpers.js => helpers.ts} (59%) delete mode 100644 src/minio.js create mode 100644 src/minio.ts rename src/{notification.js => notification.ts} (51%) delete mode 100644 src/object-uploader.js create mode 100644 src/postPolicy.ts create mode 100644 src/qs.ts create mode 100644 src/request.ts create mode 100644 src/response.ts rename src/{s3-endpoints.js => s3-endpoints.ts} (87%) rename src/{signing.js => signing.ts} (84%) create mode 100644 src/streamify.ts delete mode 100644 src/transformers.js create mode 100644 src/transformers.ts create mode 100644 src/type.ts create mode 100644 src/typed-client.ts create mode 
100644 src/typed-client2.ts create mode 100644 src/upload.ts rename src/{xml-parsers.js => xml-parsers.ts} (59%) diff --git a/src/AssumeRoleProvider.js b/src/AssumeRoleProvider.ts similarity index 52% rename from src/AssumeRoleProvider.js rename to src/AssumeRoleProvider.ts index 25411f85..6d047563 100644 --- a/src/AssumeRoleProvider.js +++ b/src/AssumeRoleProvider.ts @@ -1,13 +1,53 @@ -import * as Http from 'node:http' -import * as Https from 'node:https' +import type http from 'node:http' import { URL, URLSearchParams } from 'node:url' -import { CredentialProvider } from './CredentialProvider.js' -import { Credentials } from './Credentials.js' -import { makeDateLong, parseXml, toSha256 } from './helpers.js' -import { signV4ByServiceName } from './signing.js' +import { CredentialProvider } from './CredentialProvider.ts' +import { Credentials } from './Credentials.ts' +import { makeDateLong, parseXml, toSha256 } from './helpers.ts' +import { request } from './request.ts' +import { readAsString } from './response.ts' +import { signV4ByServiceName } from './signing.ts' + +type CredentialResponse = { + ErrorResponse?: { + Error?: { + Code?: string + Message?: string + } + } + + AssumeRoleResponse?: { + AssumeRoleResult?: { + Credentials?: { + AccessKeyId: string | undefined + SecretAccessKey: string | undefined + SessionToken: string | undefined + Expiration: string | undefined + } + } + } +} export class AssumeRoleProvider extends CredentialProvider { + private stsEndpoint: string + private accessKey: string + private secretKey: string + private durationSeconds: number + private sessionToken: string + private policy: string + private region: string + private roleArn: string + private roleSessionName: string + private externalId: string + private token: string + private webIdentityToken: string + private action: string + + private _credentials: Credentials | null + private expirySeconds: number | null + private accessExpiresAt: string | null + private transportAgent?: 
http.Agent + constructor({ stsEndpoint, accessKey, @@ -23,6 +63,21 @@ export class AssumeRoleProvider extends CredentialProvider { webIdentityToken, action = 'AssumeRole', transportAgent = undefined, + }: { + stsEndpoint: string + accessKey: string + secretKey: string + durationSeconds: number + sessionToken: string + policy: string + region?: string + roleArn: string + roleSessionName: string + externalId: string + token: string + webIdentityToken: string + action?: string + transportAgent?: http.Agent }) { super({}) @@ -39,6 +94,7 @@ export class AssumeRoleProvider extends CredentialProvider { this.webIdentityToken = webIdentityToken this.action = action this.sessionToken = sessionToken + // By default, nodejs uses a global agent if the 'agent' property // is set to undefined. Otherwise, it's okay to assume the users // know what they're doing if they specify a custom transport agent. @@ -47,12 +103,16 @@ export class AssumeRoleProvider extends CredentialProvider { /** * Internal Tracking variables */ - this.credentials = null + this._credentials = null this.expirySeconds = null this.accessExpiresAt = null } - getRequestConfig() { + getRequestConfig(): { + isHttp: boolean + requestOptions: http.RequestOptions + requestData: string + } { const url = new URL(this.stsEndpoint) const hostValue = url.hostname const portValue = url.port @@ -62,13 +122,13 @@ export class AssumeRoleProvider extends CredentialProvider { qryParams.set('Version', '2011-06-15') const defaultExpiry = 900 - let expirySeconds = parseInt(this.durationSeconds) + let expirySeconds = parseInt(this.durationSeconds as unknown as string) if (expirySeconds < defaultExpiry) { expirySeconds = defaultExpiry } this.expirySeconds = expirySeconds // for calculating refresh of credentials. 
- qryParams.set('DurationSeconds', this.expirySeconds) + qryParams.set('DurationSeconds', this.expirySeconds.toString()) if (this.policy) { qryParams.set('Policy', this.policy) @@ -97,9 +157,6 @@ export class AssumeRoleProvider extends CredentialProvider { const date = new Date() - /** - * Nodejs's Request Configuration. - */ const requestOptions = { hostname: hostValue, port: portValue, @@ -108,16 +165,22 @@ export class AssumeRoleProvider extends CredentialProvider { method: 'POST', headers: { 'Content-Type': 'application/x-www-form-urlencoded', - 'content-length': urlParams.length, + 'content-length': urlParams.length.toString(), host: hostValue, 'x-amz-date': makeDateLong(date), 'x-amz-content-sha256': contentSha256, - }, + } as Record, agent: this.transportAgent, - } + } satisfies http.RequestOptions - const authorization = signV4ByServiceName(requestOptions, this.accessKey, this.secretKey, this.region, date, 'sts') - requestOptions.headers.authorization = authorization + requestOptions.headers.authorization = signV4ByServiceName( + requestOptions, + this.accessKey, + this.secretKey, + this.region, + date, + 'sts', + ) return { requestOptions, @@ -126,50 +189,36 @@ export class AssumeRoleProvider extends CredentialProvider { } } - async performRequest() { + async performRequest(): Promise { const reqObj = this.getRequestConfig() const requestOptions = reqObj.requestOptions const requestData = reqObj.requestData const isHttp = reqObj.isHttp - const Transport = isHttp ? 
Http : Https - - const promise = new Promise((resolve, reject) => { - const requestObj = Transport.request(requestOptions, (resp) => { - let resChunks = [] - resp.on('data', (rChunk) => { - resChunks.push(rChunk) - }) - resp.on('end', () => { - let body = Buffer.concat(resChunks).toString() - const xmlobj = parseXml(body) - resolve(xmlobj) - }) - resp.on('error', (err) => { - reject(err) - }) - }) - requestObj.on('error', (e) => { - reject(e) - }) - requestObj.write(requestData) - requestObj.end() - }) - return promise + + const res = await request(requestOptions, isHttp, requestData) + + const body = await readAsString(res) + + return parseXml(body) } - parseCredentials(respObj = {}) { + parseCredentials(respObj: CredentialResponse = {}) { if (respObj.ErrorResponse) { - throw new Error('Unable to obtain credentials:', respObj) + throw new Error( + `Unable to obtain credentials: ${respObj.ErrorResponse?.Error?.Code} ${respObj.ErrorResponse?.Error?.Message}`, + { cause: respObj }, + ) } + const { AssumeRoleResponse: { AssumeRoleResult: { Credentials: { - AccessKeyId: accessKey, - SecretAccessKey: secretKey, - SessionToken: sessionToken, - Expiration: expiresAt, + AccessKeyId: accessKey = undefined, + SecretAccessKey: secretKey = undefined, + SessionToken: sessionToken = undefined, + Expiration: expiresAt = null, } = {}, } = {}, } = {}, @@ -184,38 +233,32 @@ export class AssumeRoleProvider extends CredentialProvider { }) this.setCredentials(newCreds) - return this.credentials + return this._credentials } - async refreshCredentials() { + async refreshCredentials(): Promise { try { const assumeRoleCredentials = await this.performRequest() - this.credentials = this.parseCredentials(assumeRoleCredentials) + this._credentials = this.parseCredentials(assumeRoleCredentials) } catch (err) { - this.credentials = null + this._credentials = null } - return this.credentials + return this._credentials } - async getCredentials() { - let credConfig - if (!this.credentials || 
(this.credentials && this.isAboutToExpire())) { + async getCredentials(): Promise { + let credConfig: Credentials | null + if (!this._credentials || (this._credentials && this.isAboutToExpire())) { credConfig = await this.refreshCredentials() } else { - credConfig = this.credentials + credConfig = this._credentials } return credConfig } isAboutToExpire() { - const expiresAt = new Date(this.accessExpiresAt) + const expiresAt = new Date(this.accessExpiresAt!) const provisionalExpiry = new Date(Date.now() + 1000 * 10) // check before 10 seconds. - const isAboutToExpire = provisionalExpiry > expiresAt - return isAboutToExpire + return provisionalExpiry > expiresAt } } - -// deprecated default export, please use named exports. -// keep for backward compatibility. -// eslint-disable-next-line import/no-default-export -export default AssumeRoleProvider diff --git a/src/CredentialProvider.js b/src/CredentialProvider.ts similarity index 51% rename from src/CredentialProvider.js rename to src/CredentialProvider.ts index c1d35494..99aaebb0 100644 --- a/src/CredentialProvider.js +++ b/src/CredentialProvider.ts @@ -1,7 +1,17 @@ -import { Credentials } from './Credentials.js' +import { Credentials } from './Credentials.ts' export class CredentialProvider { - constructor({ accessKey, secretKey, sessionToken }) { + private credentials: Credentials + + constructor({ + accessKey, + secretKey, + sessionToken, + }: { + accessKey?: string + secretKey?: string + sessionToken?: string + }) { this.credentials = new Credentials({ accessKey, secretKey, @@ -9,19 +19,20 @@ export class CredentialProvider { }) } - getCredentials() { + // eslint-disable-next-line @typescript-eslint/require-await + async getCredentials(): Promise { return this.credentials.get() } - setCredentials(credentials) { + setCredentials(credentials: Credentials) { if (credentials instanceof Credentials) { this.credentials = credentials } else { - throw new Error('Unable to set Credentials . 
it should be an instance of Credentials class') + throw new Error('Unable to set Credentials. it should be an instance of Credentials class') } } - setAccessKey(accessKey) { + setAccessKey(accessKey: string) { this.credentials.setAccessKey(accessKey) } @@ -29,7 +40,7 @@ export class CredentialProvider { return this.credentials.getAccessKey() } - setSecretKey(secretKey) { + setSecretKey(secretKey: string) { this.credentials.setSecretKey(secretKey) } @@ -37,7 +48,7 @@ export class CredentialProvider { return this.credentials.getSecretKey() } - setSessionToken(sessionToken) { + setSessionToken(sessionToken: string) { this.credentials.setSessionToken(sessionToken) } @@ -45,8 +56,3 @@ export class CredentialProvider { return this.credentials.getSessionToken() } } - -// deprecated default export, please use named exports. -// keep for backward compatibility. -// eslint-disable-next-line import/no-default-export -export default CredentialProvider diff --git a/src/Credentials.js b/src/Credentials.js deleted file mode 100644 index ad99155f..00000000 --- a/src/Credentials.js +++ /dev/null @@ -1,39 +0,0 @@ -export class Credentials { - constructor({ accessKey, secretKey, sessionToken }) { - this.accessKey = accessKey - this.secretKey = secretKey - this.sessionToken = sessionToken - } - - setAccessKey(accessKey) { - this.accessKey = accessKey - } - getAccessKey() { - return this.accessKey - } - setSecretKey(secretKey) { - this.secretKey = secretKey - } - getSecretKey() { - return this.secretKey - } - setSessionToken(sessionToken) { - this.sessionToken = sessionToken - } - getSessionToken() { - return this.sessionToken - } - - get() { - return { - accessKey: this.accessKey, - secretKey: this.secretKey, - sessionToken: this.sessionToken, - } - } -} - -// deprecated default export, please use named exports. -// keep for backward compatibility. 
-// eslint-disable-next-line import/no-default-export -export default Credentials diff --git a/src/Credentials.ts b/src/Credentials.ts new file mode 100644 index 00000000..78e07388 --- /dev/null +++ b/src/Credentials.ts @@ -0,0 +1,47 @@ +export class Credentials { + public accessKey?: string + public secretKey?: string + public sessionToken?: string + + constructor({ + accessKey, + secretKey, + sessionToken, + }: { + accessKey?: string + secretKey?: string + sessionToken?: string + }) { + this.accessKey = accessKey + this.secretKey = secretKey + this.sessionToken = sessionToken + } + + setAccessKey(accessKey: string) { + this.accessKey = accessKey + } + + getAccessKey() { + return this.accessKey + } + + setSecretKey(secretKey: string) { + this.secretKey = secretKey + } + + getSecretKey() { + return this.secretKey + } + + setSessionToken(sessionToken: string) { + this.sessionToken = sessionToken + } + + getSessionToken() { + return this.sessionToken + } + + get(): Credentials { + return this + } +} diff --git a/src/as-callback.ts b/src/as-callback.ts new file mode 100644 index 00000000..a829d8b3 --- /dev/null +++ b/src/as-callback.ts @@ -0,0 +1,32 @@ +import { isFunction } from './helpers.ts' + +export function asCallback( + cb: undefined | ((err: unknown | null, result: T) => void), + promise: Promise, +): Promise | void { + if (cb === undefined) { + return promise + } + + if (!isFunction(cb)) { + throw new TypeError(`callback should be of type "function", got ${cb}`) + } + + promise.then( + (result) => { + cb(null, result) + }, + (err) => { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + cb(err) + }, + ) +} + +export function asCallbackFn( + cb: undefined | ((err: unknown | null, result: T) => void), + asyncFn: () => Promise, +): Promise | void { + return asCallback(cb, asyncFn()) +} diff --git a/src/async.ts b/src/async.ts new file mode 100644 index 00000000..b3ac9cb7 --- /dev/null +++ b/src/async.ts @@ -0,0 +1,21 @@ +// promise 
helper for stdlib + +import * as fs from 'node:fs' +import * as stream from 'node:stream' +import { promisify } from 'node:util' + +export const fsp = { + fstat: promisify(fs.fstat), + stat: promisify(fs.stat), + lstat: promisify(fs.lstat), + open: promisify(fs.open), + fclose: promisify(fs.close), + rename: fs.promises.rename, + readfile: promisify(fs.readFile), + read: promisify(fs.read), +} + +export const streamPromise = { + // node:stream/promises Added in: v15.0.0 + pipeline: promisify(stream.pipeline), +} diff --git a/src/base-error.ts deleted file mode 100644 index d3947b6d..00000000 --- a/src/base-error.ts +++ /dev/null @@ -1,30 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ - -/// - -/** - * @internal - */ -export class ExtendableError extends Error { - constructor(message?: string, opt?: ErrorOptions) { - // error Option {cause?: unknown} is a 'nice to have', - // don't use it internally - super(message, opt) - // set error name, otherwise it's always 'Error' - this.name = this.constructor.name - } -} diff --git a/src/client.ts b/src/client.ts new file mode 100644 index 00000000..64361ef5 --- /dev/null +++ b/src/client.ts @@ -0,0 +1,2067 @@ +import * as crypto from 'node:crypto' +import * as fs from 'node:fs' +import type { IncomingMessage } from 'node:http' +import * as http from 'node:http' +import * as https from 'node:https' +import * as path from 'node:path' +import * as stream from 'node:stream' + +import async from 'async' +import BlockStream2 from 'block-stream2' +import { isBrowser } from 'browser-or-node' +import _ from 'lodash' +import { mkdirp } from 'mkdirp' +import xml2js from 'xml2js' + +import { asCallback, asCallbackFn } from './as-callback.ts' +import { fsp, streamPromise } from './async.ts' +import { CredentialProvider } from './CredentialProvider.ts' +import * as errors from './errors.ts' +import { S3Error } from './errors.ts' +import { extensions } from './extensions.ts' +import type { AnyFunction, MetaData } from './helpers.ts' +import { + DEFAULT_REGION, + extractMetadata, + getVersionId, + insertContentType, + isAmazonEndpoint, + isBoolean, + isEmpty, + isFunction, + isNumber, + isObject, + isOptionalFunction, + isReadableStream, + isString, + isValidBucketName, + isValidEndpoint, + isValidObjectName, + isValidPort, + isValidPrefix, + isVirtualHostStyle, + makeDateLong, + pipesetup, + prependXAMZMeta, + readableStream, + sanitizeETag, + toSha256, + uriEscape, + uriResourceEscape, +} from './helpers.ts' +import { qs } from './qs.ts' +import { drainResponse, readAsBuffer, readAsString } from './response.ts' +import type { Region } from './s3-endpoints.ts' +import { getS3Endpoint } from './s3-endpoints.ts' 
+import { signV4 } from './signing.ts' +import * as transformers from './transformers.ts' +import type { + Binary, + BucketItemFromList, + BucketItemStat, + GetObjectOpt, + IRequest, + MakeBucketOpt, + NoResultCallback, + RequestHeaders, + ResultCallback, + StatObjectOpts, + UploadedObjectInfo, +} from './type.ts' +import type { Part } from './xml-parsers.ts' +import * as xmlParsers from './xml-parsers.ts' + +const requestOptionProperties = [ + 'agent', + 'ca', + 'cert', + 'ciphers', + 'clientCertEngine', + 'crl', + 'dhparam', + 'ecdhCurve', + 'family', + 'honorCipherOrder', + 'key', + 'passphrase', + 'pfx', + 'rejectUnauthorized', + 'secureOptions', + 'secureProtocol', + 'servername', + 'sessionIdContext', +] as const + +export interface ClientOptions { + endPoint: string + accessKey: string + secretKey: string + useSSL?: boolean + port?: number + region?: Region + transport?: typeof http | typeof https + sessionToken?: string + partSize?: number + pathStyle?: boolean + credentialsProvider?: CredentialProvider + s3AccelerateEndpoint?: string + transportAgent?: http.Agent +} + +// will be replaced by rollup plugin +const version = process.env.MINIO_JS_PACKAGE_VERSION || 'development' +const Package = { version } + +export type RequestMethod = 'HEAD' | 'GET' | 'POST' | 'DELETE' | 'PUT' +export type RequestOption = Partial & { + method: RequestMethod + bucketName?: string + objectName?: string + region?: string + query?: string + pathStyle?: boolean +} + +/** + * @internal + */ +export function findCallback(args: unknown[]): [A, T | undefined] { + const index = args.findIndex((v) => isFunction(v)) + if (index === -1) { + return [args as A, undefined] + } + + return [args.slice(0, index) as A, args[index] as T] +} + +export class Client { + protected transport: typeof http | typeof https + protected host: string + protected port: number + protected protocol: string + protected accessKey: string + protected secretKey: string + protected sessionToken?: string + 
protected userAgent: string + protected anonymous: boolean + protected pathStyle: boolean + protected regionMap: Record + public region?: string + protected credentialsProvider?: CredentialProvider + partSize: number = 64 * 1024 * 1024 + protected overRidePartSize?: boolean + + protected maximumPartSize = 5 * 1024 * 1024 * 1024 + maxObjectSize = 5 * 1024 * 1024 * 1024 * 1024 + public enableSHA256: boolean + protected s3AccelerateEndpoint?: string + protected reqOptions: Record + + private readonly clientExtensions: extensions + private logStream?: stream.Writable + private transportAgent: http.Agent + + constructor(params: ClientOptions) { + // @ts-expect-error deprecated property + if (params.secure !== undefined) { + throw new Error('"secure" option deprecated, "useSSL" should be used instead') + } + // Default values if not specified. + if (params.useSSL === undefined) { + params.useSSL = true + } + if (!params.port) { + params.port = 0 + } + // Validate input params. + if (!isValidEndpoint(params.endPoint)) { + throw new errors.InvalidEndpointError(`Invalid endPoint : ${params.endPoint}`) + } + if (!isValidPort(params.port)) { + throw new errors.InvalidArgumentError(`Invalid port : ${params.port}`) + } + if (!isBoolean(params.useSSL)) { + throw new errors.InvalidArgumentError( + `Invalid useSSL flag type : ${params.useSSL}, expected to be of type "boolean"`, + ) + } + + // Validate region only if its set. + if (params.region) { + if (!isString(params.region)) { + throw new errors.InvalidArgumentError(`Invalid region : ${params.region}`) + } + } + + const host = params.endPoint.toLowerCase() + let port = params.port + let protocol: string + let transport + let transportAgent: http.Agent + // Validate if configuration is not using SSL + // for constructing relevant endpoints. + if (params.useSSL) { + // Defaults to secure. 
+ transport = https + protocol = 'https:' + port = port || 443 + transportAgent = https.globalAgent + } else { + transport = http + protocol = 'http:' + port = port || 80 + transportAgent = http.globalAgent + } + + // if custom transport is set, use it. + if (params.transport) { + if (!isObject(params.transport)) { + throw new errors.InvalidArgumentError( + `Invalid transport type : ${params.transport}, expected to be type "object"`, + ) + } + transport = params.transport + } + + // if custom transport agent is set, use it. + if (params.transportAgent) { + if (!isObject(params.transportAgent)) { + throw new errors.InvalidArgumentError( + `Invalid transportAgent type: ${params.transportAgent}, expected to be type "object"`, + ) + } + + transportAgent = params.transportAgent + } + + // User Agent should always following the below style. + // Please open an issue to discuss any new changes here. + // + // MinIO (OS; ARCH) LIB/VER APP/VER + // + const libraryComments = `(${process.platform}; ${process.arch})` + const libraryAgent = `MinIO ${libraryComments} minio-js/${Package.version}` + // User agent block ends. 
+ + this.transport = transport + this.transportAgent = transportAgent + this.host = host + this.port = port + this.protocol = protocol + this.accessKey = params.accessKey + this.secretKey = params.secretKey + this.sessionToken = params.sessionToken + this.userAgent = `${libraryAgent}` + + // Default path style is true + if (params.pathStyle === undefined) { + this.pathStyle = true + } else { + this.pathStyle = params.pathStyle + } + + if (!this.accessKey) { + this.accessKey = '' + } + if (!this.secretKey) { + this.secretKey = '' + } + this.anonymous = !this.accessKey || !this.secretKey + + if (params.credentialsProvider) { + this.credentialsProvider = params.credentialsProvider + void this.checkAndRefreshCreds() + } + + this.regionMap = {} + if (params.region) { + this.region = params.region + } + + if (params.partSize) { + this.partSize = params.partSize + this.overRidePartSize = true + } + if (this.partSize < 5 * 1024 * 1024) { + throw new errors.InvalidArgumentError(`Part size should be greater than 5MB`) + } + if (this.partSize > 5 * 1024 * 1024 * 1024) { + throw new errors.InvalidArgumentError(`Part size should be less than 5GB`) + } + + // SHA256 is enabled only for authenticated http requests. If the request is authenticated + // and the connection is https we use x-amz-content-sha256=UNSIGNED-PAYLOAD + // header for signature calculation. + this.enableSHA256 = !this.anonymous && !params.useSSL + + this.s3AccelerateEndpoint = params.s3AccelerateEndpoint || undefined + this.reqOptions = {} + + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + this.clientExtensions = new extensions(this) + } + + /** + * This is s3 Specific and does not hold validity in any other Object storage. 
+ */ + private getAccelerateEndPointIfSet(bucketName: string, objectName?: string) { + if (!isEmpty(this.s3AccelerateEndpoint) && !isEmpty(bucketName) && !isEmpty(objectName)) { + // http://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration.html + // Disable transfer acceleration for non-compliant bucket names. + if (bucketName.includes('.')) { + throw new Error(`Transfer Acceleration is not supported for non compliant bucket:${bucketName}`) + } + // If transfer acceleration is requested set new host. + // For more details about enabling transfer acceleration read here. + // http://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration.html + return this.s3AccelerateEndpoint + } + return false + } + + /** + * @param endPoint - valid S3 acceleration end point + */ + public setS3TransferAccelerate(endPoint: string) { + this.s3AccelerateEndpoint = endPoint + } + + /** + * Sets the supported request options. + */ + public setRequestOptions(options: Pick) { + // TODO: add options type details + if (!isObject(options)) { + throw new TypeError('request options should be of type "object"') + } + this.reqOptions = _.pick(options, requestOptionProperties) + } + + /** + * returns options object that can be used with http.request() + * Takes care of constructing virtual-host-style or path-style hostname + */ + protected getRequestOptions(opts: RequestOption): IRequest & { host: string; headers: Record } { + const method = opts.method + const region = opts.region + const bucketName = opts.bucketName + let objectName = opts.objectName + const headers = opts.headers + const query = opts.query + + let reqOptions = { + method, + headers: {} as RequestHeaders, + protocol: this.protocol, + // If custom transportAgent was supplied earlier, we'll inject it here + agent: this.transportAgent, + } + + // Verify if virtual host supported. 
+ let virtualHostStyle + if (bucketName) { + virtualHostStyle = isVirtualHostStyle(this.host, this.protocol, bucketName, this.pathStyle) + } + + let path = '/' + let host = this.host + + let port: undefined | number + if (this.port) { + port = this.port + } + + if (objectName) { + objectName = `${uriResourceEscape(objectName)}` + } + + // For Amazon S3 endpoint, get endpoint based on region. + if (isAmazonEndpoint(host)) { + const accelerateEndPoint = this.getAccelerateEndPointIfSet(bucketName!, objectName) + if (accelerateEndPoint) { + host = `${accelerateEndPoint}` + } else { + host = getS3Endpoint(region!) + } + } + + if (virtualHostStyle && !opts.pathStyle) { + // For all hosts which support virtual host style, `bucketName` + // is part of the hostname in the following format: + // + // var host = 'bucketName.example.com' + // + if (bucketName) { + host = `${bucketName}.${host}` + } + if (objectName) { + path = `/${objectName}` + } + } else { + // For all S3 compatible storage services we will fallback to + // path style requests, where `bucketName` is part of the URI + // path. 
+ if (bucketName) { + path = `/${bucketName}` + } + if (objectName) { + path = `/${bucketName}/${objectName}` + } + } + + if (query) { + path += `?${query}` + } + reqOptions.headers.host = host + if ((reqOptions.protocol === 'http:' && port !== 80) || (reqOptions.protocol === 'https:' && port !== 443)) { + reqOptions.headers.host = `${host}:${port}` + } + reqOptions.headers['user-agent'] = this.userAgent + if (headers) { + // have all header keys in lower case - to make signing easy + for (const [k, v] of Object.entries(headers)) { + reqOptions.headers[k.toLowerCase()] = v + } + } + + // Use any request option specified in minioClient.setRequestOptions() + reqOptions = Object.assign({}, this.reqOptions, reqOptions) + + return { + ...reqOptions, + headers: _.mapValues(reqOptions.headers, (v) => v.toString()), + host, + port, + path, + } satisfies https.RequestOptions + } + + /** + * Set application specific information. + * + * Generates User-Agent in the following style. + * + * MinIO (OS; ARCH) LIB/VER APP/VER + * + * @param appName - Application name. + * @param appVersion - Application version. + */ + public setAppInfo(appName: string, appVersion: string) { + if (!isString(appName)) { + throw new TypeError(`Invalid appName: ${appName}`) + } + if (appName.trim() === '') { + throw new errors.InvalidArgumentError('Input appName cannot be empty.') + } + if (!isString(appVersion)) { + throw new TypeError(`Invalid appVersion: ${appVersion}`) + } + if (appVersion.trim() === '') { + throw new errors.InvalidArgumentError('Input appVersion cannot be empty.') + } + this.userAgent = `${this.userAgent} ${appName}/${appVersion}` + } + + /** + * Calculate part size given the object size. 
Part size will be at least this.partSize + * + * @param size - total size + * + * @internal + */ + public calculatePartSize(size: number) { + if (!isNumber(size)) { + throw new TypeError('size should be of type "number"') + } + if (size > this.maxObjectSize) { + throw new TypeError(`size should not be more than ${this.maxObjectSize}`) + } + if (this.overRidePartSize) { + return this.partSize + } + let partSize = this.partSize + for (;;) { + // while(true) {...} throws linting error. + // If partSize is big enough to accommodate the object size, then use it. + if (partSize * 10000 > size) { + return partSize + } + // Try part sizes as 64MB, 80MB, 96MB etc. + partSize += 16 * 1024 * 1024 + } + } + + /** + * log the request, response, error + */ + private logHTTP(reqOptions: IRequest, response: http.IncomingMessage | null, err?: unknown) { + // if no logStream available return. + if (!this.logStream) { + return + } + if (!isObject(reqOptions)) { + throw new TypeError('reqOptions should be of type "object"') + } + if (response && !isReadableStream(response)) { + throw new TypeError('response should be of type "Stream"') + } + if (err && !(err instanceof Error)) { + throw new TypeError('err should be of type "Error"') + } + const logStream = this.logStream + const logHeaders = (headers: RequestHeaders) => { + Object.entries(headers).forEach(([k, v]) => { + if (k == 'authorization') { + if (isString(v)) { + const redactor = new RegExp('Signature=([0-9a-f]+)') + v = v.replace(redactor, 'Signature=**REDACTED**') + } + } + logStream.write(`${k}: ${v}\n`) + }) + logStream.write('\n') + } + logStream.write(`REQUEST: ${reqOptions.method} ${reqOptions.path}\n`) + logHeaders(reqOptions.headers) + if (response) { + this.logStream.write(`RESPONSE: ${response.statusCode}\n`) + logHeaders(response.headers as RequestHeaders) + } + if (err) { + logStream.write('ERROR BODY:\n') + const errJSON = JSON.stringify(err, null, '\t') + logStream.write(`${errJSON}\n`) + } + } + + /** + * Enable 
tracing + */ + public traceOn(stream?: stream.Writable) { + if (!stream) { + stream = process.stdout + } + this.logStream = stream + } + + /** + * Disable tracing + */ + public traceOff() { + this.logStream = undefined + } + + /** + * makeRequest is the primitive used by the apis for making S3 requests. + * payload can be empty string in case of no payload. + * statusCode is the expected statusCode. If response.statusCode does not match + * we parse the XML error and call the callback with the error message. + * + * A valid region is passed by the calls - listBuckets, makeBucket and getBucketRegion. + * + * @internal + */ + makeRequestAsync( + options: RequestOption, + payload: Binary | Uint8Array = '', + expectedCodes: number[] = [200], + region = '', + returnResponse = true, + ): Promise { + if (!isObject(options)) { + throw new TypeError('options should be of type "object"') + } + if (!isString(payload) && !isObject(payload)) { + // Buffer is of type 'object' + throw new TypeError('payload should be of type "string" or "Buffer"') + } + expectedCodes.forEach((statusCode) => { + if (!isNumber(statusCode)) { + throw new TypeError('statusCode should be of type "number"') + } + }) + if (!isString(region)) { + throw new TypeError('region should be of type "string"') + } + if (!isBoolean(returnResponse)) { + throw new TypeError('returnResponse should be of type "boolean"') + } + if (!options.headers) { + options.headers = {} + } + if (options.method === 'POST' || options.method === 'PUT' || options.method === 'DELETE') { + options.headers['content-length'] = payload.length.toString() + } + + const sha256sum = this.enableSHA256 ? 
toSha256(payload) : '' + const stream = readableStream(payload) + return this.makeRequestStreamAsync(options, stream, sha256sum, expectedCodes, region, returnResponse) + } + + /** + * new request with promise + * + * No need to drain response, response body is not valid + */ + async makeRequestAsyncOmit( + options: RequestOption, + payload: Binary | Uint8Array = '', + statusCodes: number[] = [200], + region = '', + ): Promise> { + return await this.makeRequestAsync(options, payload, statusCodes, region, false) + } + + /** + * makeRequestStream will be used directly instead of makeRequest in case the payload + * is available as a stream. for ex. putObject + * + * @internal + */ + makeRequestStreamAsync( + options: RequestOption, + stream: stream.Readable | Buffer, + sha256sum: string, + statusCodes: number[] = [200], + region = '', + returnResponse = true, + ) { + if (!isObject(options)) { + throw new TypeError('options should be of type "object"') + } + if (!(Buffer.isBuffer(stream) || isReadableStream(stream))) { + throw new errors.InvalidArgumentError('stream should be a Buffer or readable Stream') + } + if (!isString(sha256sum)) { + throw new TypeError('sha256sum should be of type "string"') + } + statusCodes.forEach((statusCode) => { + if (!isNumber(statusCode)) { + throw new TypeError('statusCode should be of type "number"') + } + }) + if (!isString(region)) { + throw new TypeError('region should be of type "string"') + } + if (!isBoolean(returnResponse)) { + throw new TypeError('returnResponse should be of type "boolean"') + } + + // sha256sum will be empty for anonymous or https requests + if (!this.enableSHA256 && sha256sum.length !== 0) { + throw new errors.InvalidArgumentError(`sha256sum expected to be empty for anonymous or https requests`) + } + // sha256sum should be valid for non-anonymous http requests. 
+ if (this.enableSHA256 && sha256sum.length !== 64) { + throw new errors.InvalidArgumentError(`Invalid sha256sum : ${sha256sum}`) + } + + const regionPromise = region ? Promise.resolve(region) : this.getBucketRegionAsync(options.bucketName!) + + void this.checkAndRefreshCreds() + + return regionPromise.then( + (finalRegion) => + new Promise((resolve, reject) => { + options.region = finalRegion + const reqOptions = this.getRequestOptions(options) + if (!this.anonymous) { + // For non-anonymous https requests sha256sum is 'UNSIGNED-PAYLOAD' for signature calculation. + if (!this.enableSHA256) { + sha256sum = 'UNSIGNED-PAYLOAD' + } + + const date = new Date() + + reqOptions.headers['x-amz-date'] = makeDateLong(date) + reqOptions.headers['x-amz-content-sha256'] = sha256sum + if (this.sessionToken) { + reqOptions.headers['x-amz-security-token'] = this.sessionToken + } + + reqOptions.headers.authorization = signV4(reqOptions, this.accessKey, this.secretKey, finalRegion, date) + } + + const req = this.transport.request(reqOptions, (response) => { + if (!response.statusCode) { + return reject(new Error("BUG: response doesn't have a statusCode")) + } + + if (!statusCodes.includes(response.statusCode)) { + // For an incorrect region, S3 server always sends back 400. + // But we will do cache invalidation for all errors so that, + // in future, if AWS S3 decides to send a different status code or + // XML error code we will still work fine. + delete this.regionMap[options.bucketName!] + // @ts-expect-error looks like `getErrorTransformer` want a `http.ServerResponse`, + // but we only have a http.IncomingMessage here + const errorTransformer = transformers.getErrorTransformer(response) + pipesetup(response, errorTransformer).on('error', (e) => { + this.logHTTP(reqOptions, response, e) + reject(e) + }) + return + } + this.logHTTP(reqOptions, response) + if (returnResponse) { + return resolve(response) + } + // We drain the socket so that the connection gets closed. 
Note that this + // is not expensive as the socket will not have any data. + drainResponse(response).then(() => resolve(response), reject) + }) + + req.on('error', (e) => { + this.logHTTP(reqOptions, null, e) + reject(e) + }) + + if (Buffer.isBuffer(stream)) { + req.end(stream) + } else { + pipesetup(stream, req) + } + }), + ) + } + + /// Bucket operations + + /** + * Creates the bucket `bucketName`. + * + * @param bucketName - Name of the bucket + * @param region - region, see ts types for valid values, or use empty string. + * @param makeOpts - Options to create a bucket. + * @param callback? - if no callback. will return a promise. + */ + makeBucket(bucketName: string, region: Region, makeOpts: MakeBucketOpt, callback: NoResultCallback): void + makeBucket(bucketName: string, region: Region, callback: NoResultCallback): void + makeBucket(bucketName: string, callback: NoResultCallback): void + makeBucket(bucketName: string, region?: Region, makeOpts?: MakeBucketOpt): Promise + + // there is also a deprecated Backward Compatibility sign + // makeBucket(bucketName: string, makeOpts: MakeBucketOpt, callback: NoResultCallback): void + + makeBucket( + bucketName: string, + regionOrCallback?: string | NoResultCallback | MakeBucketOpt, // MakeBucketOpt as second params is deprecated + makeOptsOrCallback?: MakeBucketOpt | NoResultCallback, + callback?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) + } + + let [[region = '', makeOpts = {}], cb] = findCallback< + [string, MakeBucketOpt] | [MakeBucketOpt, string], + NoResultCallback + >([regionOrCallback, makeOptsOrCallback, callback]) + if (isObject(region)) { + // Backward Compatibility + // makeBucket(bucketName: string, makeOpts: MakeBucketOpt, callback: NoResultCallback): void + makeOpts = region + region = '' + } + + if (!isString(region)) { + throw new TypeError('region should be of type "string"') + } + 
if (!isObject(makeOpts)) { + throw new TypeError('makeOpts should be of type "object"') + } + + let payload = '' + // Region already set in constructor, validate if + // caller requested bucket location is same. + if (region && this.region) { + if (region !== this.region) { + throw new errors.InvalidArgumentError(`Configured region ${this.region}, requested ${region}`) + } + } + // sending makeBucket request with XML containing 'us-east-1' fails. For + // default region server expects the request without body + if (region && region !== DEFAULT_REGION) { + const builder = new xml2js.Builder({}) + + payload = builder.buildObject({ + CreateBucketConfiguration: { + $: { + xmlns: 'http://s3.amazonaws.com/doc/2006-03-01/', + }, + LocationConstraint: region, + }, + }) + } + const method = 'PUT' + const headers: RequestHeaders = {} + if (makeOpts.ObjectLocking) { + headers['x-amz-bucket-object-lock-enabled'] = true + } + if (!region) { + region = DEFAULT_REGION + } + const finalRegion = region // type narrow + const requestOpt: RequestOption = { method, bucketName, headers } + return asCallbackFn(cb, async () => { + try { + await this.makeRequestAsyncOmit(requestOpt, payload, [200], finalRegion) + } catch (err: unknown) { + if (region === '' || region === DEFAULT_REGION) { + if (err instanceof S3Error) { + const errCode = err.code + const errRegion = err.region + if (errCode === 'AuthorizationHeaderMalformed' && errRegion !== '') { + // Retry with region returned as part of error + await this.makeRequestAsyncOmit(requestOpt, payload, [200], errCode) + } + } + } + throw err + } + }) + } + + /** + * List of buckets created. 
+ */ + listBuckets(): Promise + listBuckets(callback: ResultCallback): void + listBuckets(cb?: ResultCallback): void | Promise { + const method = 'GET' + return asCallbackFn(cb, async () => { + const response = await this.makeRequestAsync({ method }, '', [200], DEFAULT_REGION) + const body = await readAsBuffer(response) + return xmlParsers.parseListBucket(body.toString()) + }) + } + + listIncompleteUploads(bucket: string, prefix: string, recursive: boolean): stream.Readable { + if (prefix === undefined) { + prefix = '' + } + if (recursive === undefined) { + recursive = false + } + if (!isValidBucketName(bucket)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucket) + } + if (!isValidPrefix(prefix)) { + throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) + } + if (!isBoolean(recursive)) { + throw new TypeError('recursive should be of type "boolean"') + } + const delimiter = recursive ? '' : '/' + let keyMarker = '' + let uploadIdMarker = '' + const uploads: unknown[] = [] + let ended = false + const readStream = new stream.Readable({ objectMode: true }) + readStream._read = () => { + // push one upload info per _read() + if (uploads.length) { + return readStream.push(uploads.shift()) + } + if (ended) { + return readStream.push(null) + } + this.listIncompleteUploadsQuery(bucket, prefix, keyMarker, uploadIdMarker, delimiter) + .on('error', (e) => readStream.emit('error', e)) + .on('data', (result) => { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + result.prefixes.forEach((prefix) => uploads.push(prefix)) + async.eachSeries( + result.uploads, + (upload, cb) => { + // for each incomplete upload add the sizes of its uploaded parts + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + this.listParts(bucket, upload.key, upload.uploadId).then( + (parts: any) => { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + upload.size = 
parts.reduce((acc, item) => acc + item.size, 0) + uploads.push(upload) + cb() + }, + (err: any) => cb(err), + ) + }, + (err) => { + if (err) { + readStream.emit('error', err) + return + } + if (result.isTruncated) { + keyMarker = result.nextKeyMarker + uploadIdMarker = result.nextUploadIdMarker + } else { + ended = true + } + + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + readStream._read() + }, + ) + }) + } + return readStream + } + + /** + * Remove a bucket. + * + * @param bucketName - name of the bucket + */ + bucketExists(bucketName: string, callback: ResultCallback): void + bucketExists(bucketName: string): Promise + + // * `callback(err)` _function_ : `err` is `null` if the bucket exists + bucketExists(bucketName: string, cb?: ResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + const method = 'HEAD' + + return asCallbackFn(cb, async () => { + try { + await this.makeRequestAsyncOmit({ method, bucketName }, '', [200], '') + } catch (err) { + if (err instanceof S3Error) { + if (err.code == 'NoSuchBucket' || err.code == 'NotFound') { + return false + } + } + + throw err + } + + return true + }) + } + + /** + * Remove a bucket + * + * @param bucketName - name of the bucket + * @param callback + */ + removeBucket(bucketName: string, callback: NoResultCallback): void + removeBucket(bucketName: string): Promise + + // * `callback(err)` _function_ : `err` is `null` if the bucket is removed successfully. 
+  removeBucket(bucketName: string, cb?: NoResultCallback): void | Promise {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+
+    const method = 'DELETE'
+    return asCallbackFn(cb, async () => {
+      await this.makeRequestAsyncOmit({ method, bucketName }, '', [204], '')
+      delete this.regionMap[bucketName]
+    })
+  }
+
+  /**
+   * Remove the partially uploaded object.
+   *
+   * @param bucketName - name of the bucket
+   * @param objectName - name of the object
+   * @param callback - callback function is called with non `null` value in case of error
+   */
+  removeIncompleteUpload(bucketName: string, objectName: string, callback: NoResultCallback): void
+  removeIncompleteUpload(bucketName: string, objectName: string): Promise
+
+  removeIncompleteUpload(bucketName: string, objectName: string, cb?: NoResultCallback): void | Promise {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+
+    return asCallbackFn(cb, async () => {
+      const uploadId = await this.findUploadId(bucketName, objectName)
+      if (!uploadId) {
+        return
+      }
+      const method = 'DELETE'
+      const query = `uploadId=${uploadId}`
+      await this.makeRequestAsync(
+        {
+          method,
+          bucketName,
+          objectName,
+          query,
+        },
+        '',
+        [204],
+        '',
+        false,
+      )
+    })
+  }
+
+  fGetObject(bucketName: string, objectName: string, filePath: string, callback: NoResultCallback): void
+  fGetObject(
+    bucketName: string,
+    objectName: string,
+    filePath: string,
+    getOpts: GetObjectOpt,
+    callback: NoResultCallback,
+  ): void
+  /**
+   * Callback is called with `error` in case of error or `null` in case of success
+   *
+   * @param bucketName - name of the bucket
+   * @param objectName - name
of the object + * @param filePath - path to which the object data will be written to + * @param getOpts? - Optional object get option + */ + fGetObject(bucketName: string, objectName: string, filePath: string, getOpts?: GetObjectOpt): Promise + + fGetObject( + bucketName: string, + objectName: string, + filePath: string, + getOptsOrCallback?: GetObjectOpt | NoResultCallback, + callback?: NoResultCallback, + ) { + // Input validation. + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isString(filePath)) { + throw new TypeError('filePath should be of type "string"') + } + + const [[getOpts = {}], cb] = findCallback<[GetObjectOpt], NoResultCallback>([getOptsOrCallback, callback]) + + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + const executor = async (): Promise => { + let partFileStream: stream.Writable + const objStat = await this.statObject(bucketName, objectName, getOpts) + const partFile = `${filePath}.${objStat.etag}.part.minio` + + await mkdirp(path.dirname(filePath)) + + let offset = 0 + try { + const stats = await fsp.stat(partFile) + if (objStat.size === stats.size) { + return partFile + } + offset = stats.size + partFileStream = fs.createWriteStream(partFile, { flags: 'a' }) + } catch (e) { + if (e instanceof Error && (e as unknown as { code: string }).code === 'ENOENT') { + // file not exist + partFileStream = fs.createWriteStream(partFile, { flags: 'w' }) + } else { + // other error, maybe access deny + throw e + } + } + + const downloadStream = await this.getPartialObject(bucketName, objectName, offset, 0, getOpts) + + await streamPromise.pipeline(downloadStream, partFileStream) + const stats = await fsp.stat(partFile) + if (stats.size === objStat.size) { + return partFile + } + + throw new 
Error('Size mismatch between downloaded file and the object') + } + + return asCallback( + cb, + executor().then((partFile) => fsp.rename(partFile, filePath)), + ) + } + + getObject( + bucketName: string, + objectName: string, + getOpts: GetObjectOpt, + callback: ResultCallback, + ): void + getObject(bucketName: string, objectName: string, callback: ResultCallback): void + + /** + * Get Objects. return a readable stream of the object content by callback or promise. + * + * @param bucketName - name of the bucket + * @param objectName - name of the object + * @param getOpts + */ + getObject(bucketName: string, objectName: string, getOpts?: GetObjectOpt): Promise + + getObject( + bucketName: string, + objectName: string, + getOpts_Callback?: GetObjectOpt | ResultCallback, // getOpts + callback?: ResultCallback, // callback + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + const [[getOpts = {}], cb] = findCallback<[GetObjectOpt], ResultCallback>([ + getOpts_Callback, + callback, + ]) + + return asCallback(cb, this.getPartialObject(bucketName, objectName, 0, 0, getOpts)) + } + + /** + * Callback is called with readable stream of the partial object content. 
+ */ + getPartialObject( + bucketName: string, + objectName: string, + offset: number, + length?: number, + getOpts?: GetObjectOpt, + ): Promise + + getPartialObject( + bucketName: string, + objectName: string, + offset: number, + callback: ResultCallback, + ): void + getPartialObject( + bucketName: string, + objectName: string, + offset: number, + length: number, + callback: ResultCallback, + ): void + getPartialObject( + bucketName: string, + objectName: string, + offset: number, + length: number, + getOpts: GetObjectOpt, + callback: ResultCallback, + ): void + + getPartialObject( + bucketName: string, + objectName: string, + offset: number, + length_callback?: number | ResultCallback, // length + getOpts_callback?: GetObjectOpt | ResultCallback, // get opt + callback?: ResultCallback, // callback + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isNumber(offset)) { + throw new TypeError('offset should be of type "number"') + } + + const [[length = 0, getOpts = {}], cb] = findCallback<[number, GetObjectOpt], ResultCallback>([ + length_callback, + getOpts_callback, + callback, + ]) + + if (!isNumber(length)) { + throw new TypeError(`length should be of type "number"`) + } + + let range = '' + if (offset || length) { + if (offset) { + range = `bytes=${+offset}-` + } else { + range = 'bytes=0-' + offset = 0 + } + if (length) { + range += `${+length + offset - 1}` + } + } + + const headers: RequestHeaders = {} + if (range !== '') { + headers.range = range + } + + const expectedStatusCodes = [200] + if (range) { + expectedStatusCodes.push(206) + } + + const method = 'GET' + const query = qs(getOpts) + return asCallback( + cb, + this.makeRequestAsync({ method, bucketName, objectName, headers, query }, '', expectedStatusCodes), + ) + } + + /** + * 
Uploads the object. + * + * Uploading a stream + * __Arguments__ + * * `bucketName` _string_: name of the bucket + * * `objectName` _string_: name of the object + * * `stream` _Stream_: Readable stream + * * `size` _number_: size of the object (optional) + * * `callback(err, etag)` _function_: non null `err` indicates error, `etag` _string_ is the etag of the object uploaded. + * + * Uploading "Buffer" or "string" + * __Arguments__ + * * `bucketName` _string_: name of the bucket + * * `objectName` _string_: name of the object + * * `string or Buffer` _string_ or _Buffer_: string or buffer + * * `callback(err, objInfo)` _function_: `err` is `null` in case of success and `info` will have the following object details: + * * `etag` _string_: etag of the object + * * `callback(err, objInfo)` _function_: non null `err` indicates error, `objInfo` _object_ which contains versionId and etag. + */ + fPutObject( + bucketName: string, + objectName: string, + filePath: string, + metaDataOrCallback?: MetaData, + maybeCallback?: NoResultCallback, + ) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + if (!isString(filePath)) { + throw new TypeError('filePath should be of type "string"') + } + + let [[metaData = {}], callback] = findCallback<[MetaData], NoResultCallback>([metaDataOrCallback, maybeCallback]) + + if (!isObject(metaData)) { + throw new TypeError('metaData should be of type "object"') + } + + // Inserts correct `content-type` attribute based on metaData and filePath + metaData = insertContentType(metaData, filePath) + + // Updates metaData to have the correct prefix if needed + metaData = prependXAMZMeta(metaData) + const apiCallback = callback + + type Part = { + part: number + etag: string + } + + const executor = async (fd: number) => { + const stats = await 
fsp.fstat(fd)
+      const fileSize = stats.size
+      if (fileSize > this.maxObjectSize) {
+        throw new Error(`${filePath} size : ${stats.size}, max allowed size: 5TB`)
+      }
+
+      if (fileSize <= this.partSize) {
+        // simple PUT request, no multipart
+        const uploader = this.getUploader(bucketName, objectName, metaData, false)
+        const buf = await fsp.readfile(fd)
+        const { md5sum, sha256sum } = transformers.hashBinary(buf, this.enableSHA256)
+        return await uploader(buf, fileSize, sha256sum, md5sum)
+      }
+
+      const previousUploadId = await this.findUploadId(bucketName, objectName)
+      let eTags: Part[] = []
+      // if there was a previous incomplete upload, fetch all its uploaded parts info
+      let uploadId: string
+      if (previousUploadId) {
+        eTags = await this.listParts(bucketName, objectName, previousUploadId)
+        uploadId = previousUploadId
+      } else {
+        // there was no previous upload, initiate a new one
+        uploadId = await this.initiateNewMultipartUpload(bucketName, objectName, metaData)
+      }
+
+      {
+        const partSize = this.calculatePartSize(fileSize)
+        const uploader = this.getUploader(bucketName, objectName, metaData, true)
+        // convert array to object to make things easy
+        const parts = eTags.reduce(function (acc, item) {
+          if (!acc[item.part]) {
+            acc[item.part] = item
+          }
+          return acc
+        }, {} as Record)
+        const partsDone: { part: number; etag: string }[] = []
+        let partNumber = 1
+        let uploadedSize = 0
+
+        // will be reused for hashing and uploading
+        // don't worry it's "unsafe", we will read data from fs to fill it
+        const buf = Buffer.allocUnsafe(this.partSize)
+        while (uploadedSize < fileSize) {
+          const part = parts[partNumber]
+          let length = partSize
+          if (length > fileSize - uploadedSize) {
+            length = fileSize - uploadedSize
+          }
+
+          await fsp.read(fd, buf, 0, length, uploadedSize)
+          const { md5sum, sha256sum } = transformers.hashBinary(buf.subarray(0, length), this.enableSHA256)
+
+          const md5sumHex = Buffer.from(md5sum, 'base64').toString('hex')
+
+          if (part && md5sumHex === part.etag) {
+
// md5 matches, chunk already uploaded + partsDone.push({ part: partNumber, etag: part.etag }) + partNumber++ + uploadedSize += length + continue + } + + const objInfo = await uploader(uploadId, partNumber, buf.subarray(0, length), length, sha256sum, md5sum) + partsDone.push({ part: partNumber, etag: objInfo.etag }) + partNumber++ + uploadedSize += length + } + eTags = partsDone + } + + // at last, finish uploading + return this.completeMultipartUpload(bucketName, objectName, uploadId, eTags) + } + + const ensureFileClose = async (executor: (fd: number) => Promise) => { + let fd + try { + fd = await fsp.open(filePath, 'r') + } catch (e) { + throw new Error(`failed to open file ${filePath}: err ${e}`, { cause: e }) + } + + try { + // make sure to keep await, otherwise file will be closed early. + return await executor(fd) + } finally { + await fsp.fclose(fd) + } + } + + return asCallback(apiCallback, ensureFileClose(executor)) + } + + // ====================================================== // + // ====================================================== // + // ====================================================== // + // ====================================================== // + // ====================================================== // + // ====================================================== // + // ====================================================== // + // ====================================================== // + /* eslint-disable @typescript-eslint/ban-ts-comment */ + + completeMultipartUpload( + bucketName: string, + objectName: string, + uploadId: string, + etags: { + part: number + etag?: string + }[], + ): Promise<{ etag: string; versionId: string | null }> + + completeMultipartUpload( + bucketName: string, + objectName: string, + uploadId: string, + etags: { + part: number + etag?: string + }[], + cb: ResultCallback<{ etag: string; versionId: string | null }>, + ): void + + // this call will aggregate the parts on the server into a single 
object. + completeMultipartUpload( + bucketName: string, + objectName: string, + uploadId: string, + etags: { + part: number + etag?: string + }[], + cb?: ResultCallback<{ etag: string; versionId: string | null }>, + ) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isString(uploadId)) { + throw new TypeError('uploadId should be of type "string"') + } + if (!isObject(etags)) { + throw new TypeError('etags should be of type "Array"') + } + if (!isOptionalFunction(cb)) { + throw new TypeError('cb should be of type "function"') + } + + if (!uploadId) { + throw new errors.InvalidArgumentError('uploadId cannot be empty') + } + + const method = 'POST' + const query = `uploadId=${uriEscape(uploadId)}` + + const builder = new xml2js.Builder() + const payload = builder.buildObject({ + CompleteMultipartUpload: { + $: { + xmlns: 'http://s3.amazonaws.com/doc/2006-03-01/', + }, + Part: etags.map((etag) => { + return { + PartNumber: etag.part, + ETag: etag.etag, + } + }), + }, + }) + + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, objectName, query }, payload) + const body = await readAsBuffer(res) + const result = xmlParsers.parseCompleteMultipart(body.toString()) + if (!result) { + throw new Error('BUG: failed to parse server response') + } + + if (result.errCode) { + // Multipart Complete API returns an error XML after a 200 http status + throw new errors.S3Error(result.errMessage) + } + + return { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + etag: result.etag as string, + versionId: getVersionId(res.headers), + } + }) + } + + // Called by listIncompleteUploads to fetch a batch of incomplete uploads. 
+ listIncompleteUploadsQuery( + bucketName: string, + prefix: string, + keyMarker: string, + uploadIdMarker: string, + delimiter: string, + ): stream.Transform { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isString(prefix)) { + throw new TypeError('prefix should be of type "string"') + } + if (!isString(keyMarker)) { + throw new TypeError('keyMarker should be of type "string"') + } + if (!isString(uploadIdMarker)) { + throw new TypeError('uploadIdMarker should be of type "string"') + } + if (!isString(delimiter)) { + throw new TypeError('delimiter should be of type "string"') + } + const queries = [] + queries.push(`prefix=${uriEscape(prefix)}`) + queries.push(`delimiter=${uriEscape(delimiter)}`) + + if (keyMarker) { + keyMarker = uriEscape(keyMarker) + queries.push(`key-marker=${keyMarker}`) + } + if (uploadIdMarker) { + queries.push(`upload-id-marker=${uploadIdMarker}`) + } + + const maxUploads = 1000 + queries.push(`max-uploads=${maxUploads}`) + queries.sort() + queries.unshift('uploads') + let query = '' + if (queries.length > 0) { + query = `${queries.join('&')}` + } + const method = 'GET' + const transformer = transformers.getListMultipartTransformer() + this.makeRequestAsync({ method, bucketName, query }, '', [200], '', true).then( + (response) => { + if (!response) { + throw new Error('BUG: no response') + } + + pipesetup(response, transformer) + }, + (e) => { + return transformer.emit('error', e) + }, + ) + return transformer + } + + public get extensions() { + return this.clientExtensions + } + + public async setCredentialsProvider(credentialsProvider: CredentialProvider) { + if (!(credentialsProvider instanceof CredentialProvider)) { + throw new Error('Unable to get credentials. 
Expected instance of CredentialProvider') + } + this.credentialsProvider = credentialsProvider + await this.checkAndRefreshCreds() + } + + private async fetchCredentials() { + if (this.credentialsProvider) { + const credentialsConf = await this.credentialsProvider.getCredentials() + if (credentialsConf) { + // @ts-expect-error secretKey maybe undefined + this.accessKey = credentialsConf.getAccessKey() + // @ts-expect-error secretKey maybe undefined + this.secretKey = credentialsConf.getSecretKey() + this.sessionToken = credentialsConf.getSessionToken() + } else { + throw new Error( + `Unable to get credentials. Expected instance of BaseCredentialsProvider, get ${credentialsConf}`, + ) + } + } else { + throw new Error('Unable to get credentials. Expected instance of BaseCredentialsProvider') + } + } + + /** + * Initiate a new multipart upload. + * @internal + */ + async initiateNewMultipartUpload(bucketName: string, objectName: string, metaData: MetaData): Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isObject(metaData)) { + throw new errors.InvalidObjectNameError('contentType should be of type "object"') + } + const method = 'POST' + const headers = Object.assign({}, metaData) + const query = 'uploads' + const res = await this.makeRequestAsync({ method, bucketName, objectName, query, headers }) + const body = await readAsBuffer(res) + return xmlParsers.parseInitiateMultipart(body.toString()) + } + + // TODO: this method some times will fail, and cause unhandled rejection error. 
+ protected async checkAndRefreshCreds() { + if (this.credentialsProvider) { + return await this.fetchCredentials() + } + } + + /** + * gets the region of the bucket + * + * @param bucketName + * + * @internal + */ + protected async getBucketRegionAsync(bucketName: string): Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name : ${bucketName}`) + } + + const me = this + + const executor = async (): Promise => { + // Region is set with constructor, return the region right here. + if (this.region) { + return this.region + } + + const cached = this.regionMap[bucketName] + if (cached) { + return cached + } + + const extractRegionAsync = async (response: IncomingMessage) => { + const body = await readAsString(response) + const region = xmlParsers.parseBucketRegion(body) + this.regionMap[bucketName] = region + return region + } + + const method = 'GET' + const query = 'location' + + // `getBucketLocation` behaves differently in following ways for + // different environments. + // + // - For nodejs env we default to path style requests. + // - For browser env path style requests on buckets yields CORS + // error. To circumvent this problem we make a virtual host + // style request signed with 'us-east-1'. This request fails + // with an error 'AuthorizationHeaderMalformed', additionally + // the error XML also provides Region of the bucket. To validate + // this region is proper we retry the same request with the newly + // obtained region. 
+ const pathStyle = this.pathStyle && !isBrowser + + let region: string + + try { + const res = await me.makeRequestAsync({ method, bucketName, query, pathStyle }, '', [200], DEFAULT_REGION) + return extractRegionAsync(res) + } catch (e) { + if (!(e instanceof Error && e.name === 'AuthorizationHeaderMalformed')) { + throw e + } + // @ts-expect-error we set extra properties on error object + region = e.Region as string + if (!region) { + throw e + } + } + + const res = await me.makeRequestAsync({ method, bucketName, query, pathStyle }, '', [200], region) + return extractRegionAsync(res) + } + + return executor() + } + + findUploadId(bucketName: string, objectName: string, cb: ResultCallback): void + findUploadId(bucketName: string, objectName: string): Promise + findUploadId( + bucketName: string, + objectName: string, + cb?: ResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isOptionalFunction(cb)) { + throw new TypeError('cb should be of type "function"') + } + return asCallback( + cb, + new Promise((resolve, reject) => { + let latestUpload: string | undefined + const listNext = (keyMarker: string, uploadIdMarker: string) => { + this.listIncompleteUploadsQuery(bucketName, objectName, keyMarker, uploadIdMarker, '') + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + .on('error', (e) => reject(e)) + .on('data', (result) => { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + result.uploads.forEach((upload) => { + if (upload.key === objectName) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + if (!latestUpload || upload.initiated.getTime() > latestUpload.initiated.getTime()) { + latestUpload = upload + return + } + } + }) + if 
(result.isTruncated) { + listNext(result.nextKeyMarker as string, result.nextUploadIdMarker as string) + return + } + if (latestUpload) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + return resolve(latestUpload.uploadId as string) + } + resolve(undefined) + }) + } + listNext('', '') + }), + ) + } + + // Stat information of the object. + // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `objectName` _string_: name of the object + // * `statOpts` _object_ : Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional). + + statObject( + bucketName: string, + objectName: string, + statOpts: StatObjectOpts, + callback: ResultCallback, + ): void + statObject(bucketName: string, objectName: string, callback: ResultCallback): void + statObject(bucketName: string, objectName: string, statOpts?: StatObjectOpts): Promise + + statObject( + bucketName: string, + objectName: string, + statOptsOrCallback: StatObjectOpts | ResultCallback = {}, + callback?: ResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + let statOpts: StatObjectOpts = {} + let cb: ResultCallback | undefined + + // backward compatibility + if (typeof statOptsOrCallback === 'function') { + // statObject(bucketName, objectName, callback): void + statOpts = {} + cb = statOptsOrCallback + } else { + // statObject(bucketName, objectName, statOpts, callback): void + statOpts = statOptsOrCallback + cb = callback + } + + if (!isObject(statOpts)) { + throw new errors.InvalidArgumentError('statOpts should be of type "object"') + } + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + const query = qs(statOpts) + const method = 'HEAD' + return 
asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, objectName, query }) + + // We drain the socket so that the connection gets closed. Note that this + // is not expensive as the socket will not have any data. + // HEAD request doesn't expect to have many response body + await drainResponse(res) + + const result: BucketItemStat = { + size: parseInt(res.headers['content-length'] as string), + metaData: extractMetadata(res.headers), + lastModified: new Date(res.headers['last-modified'] as string), + versionId: getVersionId(res.headers), + etag: sanitizeETag(res.headers.etag), + } + + return result + }) + } + + getUploader( + bucketName: string, + objectName: string, + metaData: MetaData, + multipart: false, + ): (buf: Buffer, length: number, sha256sum: string, md5sum: string) => Promise + getUploader( + bucketName: string, + objectName: string, + metaData: MetaData, + multipart: true, + ): ( + uploadId: string, + partNumber: number, + buf: Buffer, + length: number, + sha256sum: string, + md5sum: string, + ) => Promise + + // a part of the multipart. 
+ getUploader(bucketName: string, objectName: string, metaData: MetaData, multipart: boolean) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isBoolean(multipart)) { + throw new TypeError('multipart should be of type "boolean"') + } + if (!isObject(metaData)) { + throw new TypeError('metadata should be of type "object"') + } + + const validate = (stream: stream.Readable | Buffer, length: number, sha256sum: string, md5sum: string) => { + if (!(Buffer.isBuffer(stream) || isReadableStream(stream))) { + throw new TypeError('stream should be of type "Stream" or Buffer') + } + if (!isNumber(length)) { + throw new TypeError('length should be of type "number"') + } + if (!isString(sha256sum)) { + throw new TypeError('sha256sum should be of type "string"') + } + if (!isString(md5sum)) { + throw new TypeError('md5sum should be of type "string"') + } + } + + const simpleUploader = (buf: Buffer, length: number, sha256sum: string, md5sum: string) => { + validate(buf, length, sha256sum, md5sum) + return upload('', buf, length, sha256sum, md5sum) + } + + const multipartUploader = ( + uploadId: string, + partNumber: number, + buf: Buffer, + length: number, + sha256sum: string, + md5sum: string, + ) => { + if (!isString(uploadId)) { + throw new TypeError('uploadId should be of type "string"') + } + if (!isNumber(partNumber)) { + throw new TypeError('partNumber should be of type "number"') + } + if (!uploadId) { + throw new errors.InvalidArgumentError('Empty uploadId') + } + if (!partNumber) { + throw new errors.InvalidArgumentError('partNumber cannot be 0') + } + validate(buf, length, sha256sum, md5sum) + const query = `partNumber=${partNumber}&uploadId=${uriEscape(uploadId)}` + return upload(query, buf, length, sha256sum, md5sum) + } + + const upload = async (query: string, 
stream: Buffer, length: number, sha256sum: string, md5sum: string) => { + const method = 'PUT' + let headers: RequestHeaders = { 'Content-Length': length } + + if (!multipart) { + headers = Object.assign({}, metaData, headers) + } + + if (!this.enableSHA256) { + headers['Content-MD5'] = md5sum + } + + const response = await this.makeRequestStreamAsync( + { + method, + bucketName, + objectName, + query, + headers, + }, + stream, + sha256sum, + [200], + '', + false, + ) + return { + etag: sanitizeETag(response.headers.etag), + versionId: getVersionId(response.headers), + } + } + if (multipart) { + return multipartUploader + } + return simpleUploader + } + + // Get part-info of all parts of an incomplete upload specified by uploadId. + listParts(bucketName: string, objectName: string, uploadId: string): Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isString(uploadId)) { + throw new TypeError('uploadId should be of type "string"') + } + if (!uploadId) { + throw new errors.InvalidArgumentError('uploadId cannot be empty') + } + return new Promise((resolve, reject) => { + let parts: Part[] = [] + const listNext = (marker?: number) => { + this.listPartsQuery(bucketName, objectName, uploadId, marker) + .then((result) => { + parts = parts.concat(result.parts) + if (result.isTruncated) { + listNext(result.marker) + return + } + resolve(parts) + }) + .catch((e) => reject(e)) + } + listNext(0) + }) + } + + // Called by listParts to fetch a batch of part-info + async listPartsQuery(bucketName: string, objectName: string, uploadId: string, marker?: number) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new 
errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isString(uploadId)) { + throw new TypeError('uploadId should be of type "string"') + } + if (!isNumber(marker)) { + throw new TypeError('marker should be of type "number"') + } + if (!uploadId) { + throw new errors.InvalidArgumentError('uploadId cannot be empty') + } + let query = '' + if (marker && marker !== 0) { + query += `part-number-marker=${marker}&` + } + query += `uploadId=${uriEscape(uploadId)}` + + const method = 'GET' + + const res = await this.makeRequestAsync({ method, bucketName, objectName, query }) + const body = await readAsBuffer(res) + return xmlParsers.parseListParts(body.toString()) + } +} + +export async function uploadStream({ + client, + bucketName, + objectName, + metaData, + stream: source, + partSize, +}: { + client: Client + bucketName: string + objectName: string + metaData: MetaData + stream: stream.Readable + partSize: number +}): Promise { + // A map of the previously uploaded chunks, for resuming a file upload. This + // will be null if we aren't resuming an upload. + const oldParts: Record = {} + + // Keep track of the etags for aggregating the chunks together later. Each + // etag represents a single chunk of the file. 
+ const eTags: Part[] = [] + + const previousUploadId = await client.findUploadId(bucketName, objectName) + let uploadId: string + if (!previousUploadId) { + uploadId = await client.initiateNewMultipartUpload(bucketName, objectName, metaData) + } else { + uploadId = previousUploadId + const oldTags = await client.listParts(bucketName, objectName, previousUploadId) + oldTags.forEach((e) => { + oldTags[e.part] = e + }) + } + + const chunkier = new BlockStream2({ size: partSize, zeroPadding: false }) + + const [_, o] = await Promise.all([ + new Promise((resolve, reject) => { + source.pipe(chunkier) + chunkier.on('end', resolve) + source.on('error', reject) + chunkier.on('error', reject) + }), + (async () => { + let partNumber = 1 + + for await (const chunk of chunkier) { + const md5 = crypto.createHash('md5').update(chunk).digest() + + const oldPart = oldParts[partNumber] + if (oldPart) { + if (oldPart.etag === md5.toString('hex')) { + eTags.push({ part: partNumber, etag: oldPart.etag }) + partNumber++ + continue + } + } + + partNumber++ + + // now start to upload missing part + const options: RequestOption = { + method: 'PUT', + query: qs({ partNumber, uploadId }), + headers: { + 'Content-Length': chunk.length, + 'Content-MD5': md5.toString('base64'), + }, + bucketName, + objectName, + } + + const response = await client.makeRequestAsyncOmit(options, chunk) + + let etag = response.headers.etag + if (etag) { + etag = etag.replace(/^"/, '').replace(/"$/, '') + } else { + etag = '' + } + + eTags.push({ part: partNumber, etag }) + } + + return await client.completeMultipartUpload(bucketName, objectName, uploadId, eTags) + })(), + ]) + + return o +} diff --git a/src/copyConditions.ts b/src/copyConditions.ts new file mode 100644 index 00000000..25d00331 --- /dev/null +++ b/src/copyConditions.ts @@ -0,0 +1,37 @@ +export class CopyConditions { + public modified: string + public unmodified: string + public matchETag: string + public matchETagExcept: string + + constructor() { 
+ this.modified = '' + this.unmodified = '' + this.matchETag = '' + this.matchETagExcept = '' + } + + setModified(date: Date): void { + if (!(date instanceof Date)) { + throw new TypeError('date must be of type Date') + } + + this.modified = date.toUTCString() + } + + setUnmodified(date: Date): void { + if (!(date instanceof Date)) { + throw new TypeError('date must be of type Date') + } + + this.unmodified = date.toUTCString() + } + + setMatchETag(etag: string): void { + this.matchETag = etag + } + + setMatchETagExcept(etag: string): void { + this.matchETagExcept = etag + } +} diff --git a/src/errors.ts b/src/errors.ts index fa6f62fb..12c583bd 100644 --- a/src/errors.ts +++ b/src/errors.ts @@ -14,94 +14,74 @@ * limitations under the License. */ -import { ExtendableError } from './base-error.ts' - -/** - * AnonymousRequestError is generated for anonymous keys on specific - * APIs. NOTE: PresignedURL generation always requires access keys. - */ -export class AnonymousRequestError extends ExtendableError {} - -/** - * InvalidArgumentError is generated for all invalid arguments. - */ +class ExtendableError extends Error { + // es6 doesn't support new error cause + // and nodejs runtime will add stack automatically, no need to add it. + constructor(message?: string, opt?: ErrorOptions) { + super(message, opt) + this.name = this.constructor.name + } +} + +// AnonymousRequestError is generated for anonymous keys on specific +// APIs. NOTE: PresignedURL generation always requires access keys. +export class AnonymousRequestError extends Error {} + +// InvalidArgumentError is generated for all invalid arguments. export class InvalidArgumentError extends ExtendableError {} -/** - * InvalidPortError is generated when a non integer value is provided - * for ports. - */ +// InvalidPortError is generated when a non integer value is provided +// for ports. 
export class InvalidPortError extends ExtendableError {} -/** - * InvalidEndpointError is generated when an invalid end point value is - * provided which does not follow domain standards. - */ +// InvalidEndpointError is generated when an invalid end point value is +// provided which does not follow domain standards. export class InvalidEndpointError extends ExtendableError {} -/** - * InvalidBucketNameError is generated when an invalid bucket name is - * provided which does not follow AWS S3 specifications. - * http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html - */ +// InvalidBucketNameError is generated when an invalid bucket name is +// provided which does not follow AWS S3 specifications. +// http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html export class InvalidBucketNameError extends ExtendableError {} -/** - * InvalidObjectNameError is generated when an invalid object name is - * provided which does not follow AWS S3 specifications. - * http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html - */ +// InvalidObjectNameError is generated when an invalid object name is +// provided which does not follow AWS S3 specifications. +// http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html export class InvalidObjectNameError extends ExtendableError {} -/** - * AccessKeyRequiredError generated by signature methods when access - * key is not found. - */ +// AccessKeyRequiredError generated by signature methods when access +// key is not found. export class AccessKeyRequiredError extends ExtendableError {} -/** - * SecretKeyRequiredError generated by signature methods when secret - * key is not found. - */ +// SecretKeyRequiredError generated by signature methods when secret +// key is not found. export class SecretKeyRequiredError extends ExtendableError {} -/** - * ExpiresParamError generated when expires parameter value is not - * well within stipulated limits. 
- */ +// ExpiresParamError generated when expires parameter value is not +// well within stipulated limits. export class ExpiresParamError extends ExtendableError {} -/** - * InvalidDateError generated when invalid date is found. - */ +// InvalidDateError generated when invalid date is found. export class InvalidDateError extends ExtendableError {} -/** - * InvalidPrefixError generated when object prefix provided is invalid - * or does not conform to AWS S3 object key restrictions. - */ +// InvalidPrefixError generated when object prefix provided is invalid +// or does not conform to AWS S3 object key restrictions. export class InvalidPrefixError extends ExtendableError {} -/** - * InvalidBucketPolicyError generated when the given bucket policy is invalid. - */ +// InvalidBucketPolicyError generated when the given bucket policy is invalid. export class InvalidBucketPolicyError extends ExtendableError {} -/** - * IncorrectSizeError generated when total data read mismatches with - * the input size. - */ +// IncorrectSizeError generated when total data read mismatches with +// the input size. export class IncorrectSizeError extends ExtendableError {} -/** - * InvalidXMLError generated when an unknown XML is found. - */ +// InvalidXMLError generated when an unknown XML is found. export class InvalidXMLError extends ExtendableError {} -/** - * S3Error is generated for errors returned from S3 server. - * see getErrorTransformer for details - */ -export class S3Error extends ExtendableError {} +// S3Error is generated for errors returned from S3 server. 
+// see getErrorTransformer for details +export class S3Error extends ExtendableError { + code?: string + region?: string +} export class IsValidBucketNameError extends ExtendableError {} diff --git a/src/extensions.js b/src/extensions.ts similarity index 80% rename from src/extensions.js rename to src/extensions.ts index 5e04a930..8e018ce1 100644 --- a/src/extensions.js +++ b/src/extensions.ts @@ -14,16 +14,18 @@ * limitations under the License. */ -import * as Stream from 'node:stream' +import * as stream from 'node:stream' import * as errors from './errors.ts' -import { isBoolean, isNumber, isString, isValidBucketName, isValidPrefix, pipesetup, uriEscape } from './helpers.js' -import * as transformers from './transformers.js' +import { isBoolean, isNumber, isString, isValidBucketName, isValidPrefix, pipesetup, uriEscape } from './helpers.ts' +import * as transformers from './transformers.ts' +import type { TypedClient2 } from './typed-client2.ts' + +// TODO +type S3Object = unknown export class extensions { - constructor(client) { - this.client = client - } + constructor(readonly client: TypedClient2) {} // List the objects in the bucket using S3 ListObjects V2 With Metadata // @@ -42,7 +44,7 @@ export class extensions { // * `obj.lastModified` _Date_: modified time stamp // * `obj.metadata` _object_: metadata of the object - listObjectsV2WithMetadata(bucketName, prefix, recursive, startAfter) { + listObjectsV2WithMetadata(bucketName: string, prefix: string, recursive: boolean, startAfter: string) { if (prefix === undefined) { prefix = '' } @@ -68,11 +70,11 @@ export class extensions { throw new TypeError('startAfter should be of type "string"') } // if recursive is false set delimiter to '/' - var delimiter = recursive ? '' : '/' - var continuationToken = '' - var objects = [] - var ended = false - var readStream = Stream.Readable({ objectMode: true }) + const delimiter = recursive ? 
'' : '/' + let continuationToken = '' + let objects: S3Object[] = [] + let ended = false + const readStream = new stream.Readable({ objectMode: true }) readStream._read = () => { // push one object per _read() if (objects.length) { @@ -92,6 +94,7 @@ export class extensions { ended = true } objects = result.objects + // @ts-expect-error read more readStream._read() }) } @@ -109,7 +112,14 @@ export class extensions { // * `max-keys` _number_: Sets the maximum number of keys returned in the response body. // * `start-after` _string_: Specifies the key to start after when listing objects in a bucket. - listObjectsV2WithMetadataQuery(bucketName, prefix, continuationToken, delimiter, maxKeys, startAfter) { + private listObjectsV2WithMetadataQuery( + bucketName: string, + prefix: string, + continuationToken: string, + delimiter: string, + maxKeys: number, + startAfter: string, + ) { if (!isValidBucketName(bucketName)) { throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) } @@ -128,7 +138,7 @@ export class extensions { if (!isString(startAfter)) { throw new TypeError('startAfter should be of type "string"') } - var queries = [] + const queries = [] // Call for listing objects v2 API queries.push(`list-type=2`) @@ -155,23 +165,29 @@ export class extensions { queries.push(`max-keys=${maxKeys}`) } queries.sort() - var query = '' + let query = '' if (queries.length > 0) { query = `${queries.join('&')}` } - var method = 'GET' - var transformer = transformers.getListObjectsV2WithMetadataTransformer() - this.client.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return transformer.emit('error', e) - } - pipesetup(response, transformer) - }) + const method = 'GET' + const transformer = transformers.getListObjectsV2WithMetadataTransformer() + this.client + .makeRequestAsync({ + method, + bucketName, + query, + }) + .then( + (response) => { + if (!response) { + throw new Error('BUG: callback missing response 
argument') + } + pipesetup(response, transformer) + }, + (e) => { + return transformer.emit('error', e) + }, + ) return transformer } } - -// deprecated default export, please use named exports. -// keep for backward compatibility. -// eslint-disable-next-line import/no-default-export -export default extensions diff --git a/src/helpers.js b/src/helpers.ts similarity index 59% rename from src/helpers.js rename to src/helpers.ts index 18091089..35dae480 100644 --- a/src/helpers.js +++ b/src/helpers.ts @@ -14,9 +14,10 @@ * limitations under the License. */ -import * as Crypto from 'node:crypto' -import * as fs from 'node:fs' -import * as path from 'node:path' +import * as crypto from 'node:crypto' +import fs from 'node:fs' +import type { IncomingHttpHeaders } from 'node:http' +import path from 'node:path' import * as stream from 'node:stream' import { isBrowser } from 'browser-or-node' @@ -24,50 +25,20 @@ import { XMLParser } from 'fast-xml-parser' import ipaddr from 'ipaddr.js' import _ from 'lodash' import mime from 'mime-types' -import querystring from 'query-string' import * as errors from './errors.ts' +import { qs } from './qs.ts' +import type { Binary, Mode } from './type.ts' -const fxp = new XMLParser() - -// Returns a wrapper function that will promisify a given callback function. -// It will preserve 'this'. -export function promisify(fn) { - return function () { - // If the last argument is a function, assume its the callback. - let callback = arguments[arguments.length - 1] - - // If the callback is given, don't promisify, just pass straight in. - if (typeof callback === 'function') { - return fn.apply(this, arguments) - } - - // Otherwise, create a new set of arguments, and wrap - // it in a promise. - let args = [...arguments] - - return new Promise((resolve, reject) => { - // Add the callback function. 
- args.push((err, value) => { - if (err) { - return reject(err) - } - - resolve(value) - }) - - // Call the function with our special adaptor callback added. - fn.apply(this, args) - }) - } -} +export type MetaData = Record +export type Header = Record // All characters in string which are NOT unreserved should be percent encoded. // Unreserved characers are : ALPHA / DIGIT / "-" / "." / "_" / "~" // Reference https://tools.ietf.org/html/rfc3986#section-2.2 -export function uriEscape(string) { - return string.split('').reduce((acc, elem) => { - let buf = Buffer.from(elem) +export function uriEscape(string: string) { + return string.split('').reduce((acc: string, elem: string) => { + const buf = Buffer.from(elem) if (buf.length === 1) { // length 1 indicates that elem is not a unicode character. // Check if it is an unreserved characer. @@ -87,23 +58,23 @@ export function uriEscape(string) { } // elem needs encoding - i.e elem should be encoded if it's not unreserved // character or if it's a unicode character. 
- for (var i = 0; i < buf.length; i++) { - acc = acc + '%' + buf[i].toString(16).toUpperCase() + for (const char of buf) { + acc = acc + '%' + char.toString(16).toUpperCase() } return acc }, '') } -export function uriResourceEscape(string) { +export function uriResourceEscape(string: string) { return uriEscape(string).replace(/%2F/g, '/') } -export function getScope(region, date, serviceName = 's3') { +export function getScope(region: string, date: Date, serviceName = 's3') { return `${makeDateShort(date)}/${region}/${serviceName}/aws4_request` } // isAmazonEndpoint - true if endpoint is 's3.amazonaws.com' or 's3.cn-north-1.amazonaws.com.cn' -export function isAmazonEndpoint(endpoint) { +export function isAmazonEndpoint(endpoint: string) { return endpoint === 's3.amazonaws.com' || endpoint === 's3.cn-north-1.amazonaws.com.cn' } @@ -112,24 +83,24 @@ export function isAmazonEndpoint(endpoint) { // style if the protocol is 'https:', this is due to SSL wildcard // limitation. For all other buckets and Amazon S3 endpoint we will // default to virtual host style. -export function isVirtualHostStyle(endpoint, protocol, bucket, pathStyle) { - if (protocol === 'https:' && bucket.indexOf('.') > -1) { +export function isVirtualHostStyle(endpoint: string, protocol: string, bucket: string, pathStyle: boolean) { + if (protocol === 'https:' && bucket.includes('.')) { return false } return isAmazonEndpoint(endpoint) || !pathStyle } -export function isValidIP(ip) { +export function isValidIP(ip: string) { return ipaddr.isValid(ip) } // isValidEndpoint - true if endpoint is valid domain. -export function isValidEndpoint(endpoint) { +export function isValidEndpoint(endpoint: string) { return isValidDomain(endpoint) || isValidIP(endpoint) } // isValidDomain - true if input host is a valid domain. 
-export function isValidDomain(host) { +export function isValidDomain(host: string) { if (!isString(host)) { return false } @@ -149,10 +120,10 @@ if (host[0] === '.') { return false } - var alphaNumerics = '`~!@#$%^&*()+={}[]|\\"\';:><?/'.split('') + const alphaNumerics = '`~!@#$%^&*()+={}[]|\\"\';:><?/'.split('') // All non alphanumeric characters are invalid. - for (var i in alphaNumerics) { - if (host.indexOf(alphaNumerics[i]) > -1) { + for (const char of alphaNumerics) { + if (host.includes(char)) { return false } } @@ -163,7 +134,7 @@ // Probes contentType using file extensions. // For example: probeContentType('file.png') returns 'image/png'. -export function probeContentType(path) { +export function probeContentType(path: string) { let contentType = mime.lookup(path) if (!contentType) { contentType = 'application/octet-stream' @@ -172,7 +143,7 @@ } // isValidPort - is input port valid. -export function isValidPort(port) { +export function isValidPort(port: unknown): port is number { // verify if port is a number. if (!isNumber(port)) { return false } @@ -185,13 +156,13 @@ if (port === 0) { return true } - var min_port = 1 - var max_port = 65535 + const min_port = 1 + const max_port = 65535 // Verify if port is in range. return port >= min_port && port <= max_port } -export function isValidBucketName(bucket) { +export function isValidBucketName(bucket: unknown) { if (!isString(bucket)) { return false } @@ -202,7 +173,7 @@ return false } // bucket with successive periods is invalid. - if (bucket.indexOf('..') > -1) { + if (bucket.includes('..')) { return false } // bucket cannot have ip address style.
@@ -218,7 +189,7 @@ export function isValidBucketName(bucket) { } // check if objectName is a valid object name -export function isValidObjectName(objectName) { +export function isValidObjectName(objectName: unknown) { if (!isValidPrefix(objectName)) { return false } @@ -229,7 +200,7 @@ export function isValidObjectName(objectName) { } // check if prefix is valid -export function isValidPrefix(prefix) { +export function isValidPrefix(prefix: unknown): prefix is string { if (!isString(prefix)) { return false } @@ -240,80 +211,98 @@ export function isValidPrefix(prefix) { } // check if typeof arg number -export function isNumber(arg) { +export function isNumber(arg: unknown): arg is number { return typeof arg === 'number' } +export type AnyFunction = (...args: any[]) => any + // check if typeof arg function -export function isFunction(arg) { +export function isFunction(arg: unknown): arg is AnyFunction { + return typeof arg === 'function' +} + +// check if typeof arg function or undefined +export function isOptionalFunction(arg: unknown): arg is undefined | AnyFunction { + if (arg === undefined) { + return true + } return typeof arg === 'function' } // check if typeof arg string -export function isString(arg) { +export function isString(arg: unknown): arg is string { return typeof arg === 'string' } // check if typeof arg object -export function isObject(arg) { +export function isObject(arg: unknown): arg is object { return typeof arg === 'object' && arg !== null } // check if object is readable stream -export function isReadableStream(arg) { - return isObject(arg) && isFunction(arg._read) +export function isReadableStream(arg: unknown): arg is stream.Readable { + // eslint-disable-next-line @typescript-eslint/unbound-method + return isObject(arg) && isFunction((arg as stream.Readable)._read) } // check if arg is boolean -export function isBoolean(arg) { +export function isBoolean(arg: unknown): arg is boolean { return typeof arg === 'boolean' } // check if arg is 
array -export function isArray(arg) { +export function isArray(arg: unknown): arg is Array { return Array.isArray(arg) } +export function isEmpty(o: unknown): o is null | undefined { + return _.isEmpty(o) +} + +export function isEmptyObject(o: Record): boolean { + return Object.values(o).filter((x) => x !== undefined).length !== 0 +} + // check if arg is a valid date -export function isValidDate(arg) { +export function isValidDate(arg: unknown): arg is Date { + // @ts-expect-error TS(2345): Argument of type 'Date' is not assignable to param... Remove this comment to see the full error message return arg instanceof Date && !isNaN(arg) } // Create a Date string with format: // 'YYYYMMDDTHHmmss' + Z -export function makeDateLong(date) { +export function makeDateLong(date?: Date): string { date = date || new Date() // Gives format like: '2017-08-07T16:28:59.889Z' - date = date.toISOString() + const s = date.toISOString() - return date.slice(0, 4) + date.slice(5, 7) + date.slice(8, 13) + date.slice(14, 16) + date.slice(17, 19) + 'Z' + return s.slice(0, 4) + s.slice(5, 7) + s.slice(8, 13) + s.slice(14, 16) + s.slice(17, 19) + 'Z' } // Create a Date string with format: // 'YYYYMMDD' -export function makeDateShort(date) { +export function makeDateShort(date?: Date) { date = date || new Date() // Gives format like: '2017-08-07T16:28:59.889Z' - date = date.toISOString() + const s = date.toISOString() - return date.slice(0, 4) + date.slice(5, 7) + date.slice(8, 10) + return s.slice(0, 4) + s.slice(5, 7) + s.slice(8, 10) } // pipesetup sets up pipe() from left to right os streams array // pipesetup will also make sure that error emitted at any of the upstream Stream // will be emitted at the last stream. 
This makes error handling simple -export function pipesetup(...streams) { - return streams.reduce((src, dst) => { - src.on('error', (err) => dst.emit('error', err)) - return src.pipe(dst) - }) +export function pipesetup(src: stream.Readable, dst: stream.Writable) { + src.on('error', (err: unknown) => dst.emit('error', err)) + return src.pipe(dst) } // return a Readable stream that emits data -export function readableStream(data) { - var s = new stream.Readable() +export function readableStream(data: unknown): stream.Readable { + const s = new stream.Readable() s._read = () => {} s.push(data) s.push(null) @@ -321,26 +310,30 @@ export function readableStream(data) { } // Process metadata to insert appropriate value to `content-type` attribute -export function insertContentType(metaData, filePath) { +export function insertContentType(metaData: MetaData, filePath: string) { // check if content-type attribute present in metaData - for (var key in metaData) { + for (const key in metaData) { if (key.toLowerCase() === 'content-type') { return metaData } } // if `content-type` attribute is not present in metadata, // then infer it from the extension in filePath - var newMetadata = Object.assign({}, metaData) + const newMetadata = Object.assign({}, metaData) newMetadata['content-type'] = probeContentType(filePath) return newMetadata } // Function prepends metadata with the appropriate prefix if it is not already on -export function prependXAMZMeta(metaData) { - var newMetadata = Object.assign({}, metaData) - for (var key in metaData) { - if (!isAmzHeader(key) && !isSupportedHeader(key) && !isStorageclassHeader(key)) { - newMetadata['X-Amz-Meta-' + key] = newMetadata[key] +export function prependXAMZMeta(metaData?: MetaData) { + if (!metaData) { + return {} + } + + const newMetadata = Object.assign({}, metaData) + for (const [key, value] of _.entries(metaData)) { + if (!isAmzHeader(key) && !isSupportedHeader(key) && !isStorageClassHeader(key)) { + newMetadata['X-Amz-Meta-' + 
key] = value delete newMetadata[key] } } @@ -348,8 +341,8 @@ export function prependXAMZMeta(metaData) { } // Checks if it is a valid header according to the AmazonS3 API -export function isAmzHeader(key) { - var temp = key.toLowerCase() +export function isAmzHeader(key: string) { + const temp = key.toLowerCase() return ( temp.startsWith('x-amz-meta-') || temp === 'x-amz-acl' || @@ -357,9 +350,10 @@ export function isAmzHeader(key) { temp === 'x-amz-server-side-encryption' ) } + // Checks if it is a supported Header -export function isSupportedHeader(key) { - var supported_headers = [ +export function isSupportedHeader(key: string) { + const supported_headers = [ 'content-type', 'cache-control', 'content-encoding', @@ -367,20 +361,23 @@ export function isSupportedHeader(key) { 'content-language', 'x-amz-website-redirect-location', ] - return supported_headers.indexOf(key.toLowerCase()) > -1 + return supported_headers.includes(key.toLowerCase()) } + // Checks if it is a storage header -export function isStorageclassHeader(key) { +export function isStorageClassHeader(key: string) { return key.toLowerCase() === 'x-amz-storage-class' } -export function extractMetadata(metaData) { - var newMetadata = {} - for (var key in metaData) { - if (isSupportedHeader(key) || isStorageclassHeader(key) || isAmzHeader(key)) { +export function extractMetadata(metaData: IncomingHttpHeaders) { + const newMetadata = {} + for (const key in metaData) { + if (isSupportedHeader(key) || isStorageClassHeader(key) || isAmzHeader(key)) { if (key.toLowerCase().startsWith('x-amz-meta-')) { + // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... Remove this comment to see the full error message newMetadata[key.slice(11, key.length)] = metaData[key] } else { + // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... 
Remove this comment to see the full error message newMetadata[key] = metaData[key] } } @@ -388,68 +385,68 @@ return newMetadata } -export function getVersionId(headers = {}) { - const versionIdValue = headers['x-amz-version-id'] +export function getVersionId(headers: IncomingHttpHeaders = {}) { + const versionIdValue = headers['x-amz-version-id'] as string return versionIdValue || null } -export function getSourceVersionId(headers = {}) { +export function getSourceVersionId(headers: IncomingHttpHeaders = {}) { const sourceVersionId = headers['x-amz-copy-source-version-id'] return sourceVersionId || null } -export function sanitizeETag(etag = '') { - var replaceChars = { '"': '', '&quot;': '', '&#34;': '', '%22': '', '&#x22;': '' } +export function sanitizeETag(etag = ''): string { + const replaceChars = { '"': '', '&quot;': '', '&#34;': '', '%22': '', '&#x22;': '' } + // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... Remove this comment to see the full error message return etag.replace(/^("|&quot;|&#34;)|("|&quot;|&#34;)$/g, (m) => replaceChars[m]) } export const RETENTION_MODES = { GOVERNANCE: 'GOVERNANCE', COMPLIANCE: 'COMPLIANCE', -} +} as const export const RETENTION_VALIDITY_UNITS = { DAYS: 'Days', YEARS: 'Years', -} +} as const export const LEGAL_HOLD_STATUS = { ENABLED: 'ON', DISABLED: 'OFF', -} +} as const -const objectToBuffer = (payload) => { - const payloadBuf = Buffer.from(Buffer.from(payload)) - return payloadBuf +function objectToBuffer(payload: Binary | Uint8Array): Buffer { + // don't know how to write this... + return Buffer.from(payload) } -export const toMd5 = (payload) => { - let payLoadBuf = objectToBuffer(payload) +export function toMd5(payload: Binary | Uint8Array): string { + let payLoadBuf: Binary = objectToBuffer(payload) // use string from browser and buffer from nodejs // browser support is tested only against minio server payLoadBuf = isBrowser ?
payLoadBuf.toString() : payLoadBuf - return Crypto.createHash('md5').update(payLoadBuf).digest().toString('base64') + return crypto.createHash('md5').update(payLoadBuf).digest().toString('base64') } -export const toSha256 = (payload) => { - return Crypto.createHash('sha256').update(payload).digest('hex') +export function toSha256(payload: Binary | Uint8Array): string { + return crypto.createHash('sha256').update(payload).digest('hex') } // toArray returns a single element array with param being the element, // if param is just a string, and returns 'param' back if it is an array // So, it makes sure param is always an array -export const toArray = (param) => { +export function toArray<T = unknown>(param: T | T[]): Array<T> { if (!Array.isArray(param)) { - return [param] + return [param] as T[] } return param } -export const sanitizeObjectKey = (objectName) => { +export function sanitizeObjectKey(objectName: string): string { // + symbol characters are not decoded as spaces in JS. so replace them first and decode to get the correct result. - let asStrName = (objectName ? objectName.toString() : '').replace(/\+/g, ' ') - const sanitizedName = decodeURIComponent(asStrName) - return sanitizedName + const asStrName = (objectName ? objectName.toString() : '').replace(/\+/g, ' ') + return decodeURIComponent(asStrName) } export const PART_CONSTRAINTS = { @@ -483,23 +480,27 @@ const ENCRYPTION_HEADERS = { sseGenericHeader: GENERIC_SSE_HEADER, // sseKmsKeyID is the AWS SSE-KMS key id. sseKmsKeyID: GENERIC_SSE_HEADER + '-Aws-Kms-Key-Id', -} +} as const /** * Return Encryption headers * @param encConfig * @returns an object with key value pairs that can be used in headers.
*/ -function getEncryptionHeaders(encConfig) { +function getEncryptionHeaders(encConfig: Encryption): Record { const encType = encConfig.type const encHeaders = {} - if (!_.isEmpty(encType)) { + if (!isEmpty(encType)) { if (encType === ENCRYPTION_TYPES.SSEC) { return { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore [encHeaders[ENCRYPTION_HEADERS.sseGenericHeader]]: 'AES256', } } else if (encType === ENCRYPTION_TYPES.KMS) { return { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore [ENCRYPTION_HEADERS.sseGenericHeader]: encConfig.SSEAlgorithm, [ENCRYPTION_HEADERS.sseKmsKeyID]: encConfig.KMSMasterKeyID, } @@ -510,16 +511,28 @@ function getEncryptionHeaders(encConfig) { } export class CopySourceOptions { + public readonly Bucket: string + public readonly Object: string + public readonly VersionID: string + public MatchETag: string + private readonly NoMatchETag: string + private readonly MatchModifiedSince: string | null + private readonly MatchUnmodifiedSince: string | null + public readonly MatchRange: boolean + public readonly Start: number + public readonly End: number + private readonly Encryption?: Encryption + /** * - * @param Bucket __string__ Bucket Name - * @param Object __string__ Object Name - * @param VersionID __string__ Valid versionId - * @param MatchETag __string__ Etag to match - * @param NoMatchETag __string__ Etag to exclude - * @param MatchModifiedSince __string__ Modified Date of the object/part. UTC Date in string format - * @param MatchUnmodifiedSince __string__ Modified Date of the object/part to exclude UTC Date in string format - * @param MatchRange __boolean__ true or false Object range to match + * @param Bucket - Bucket Name + * @param Object - Object Name + * @param VersionID - Valid versionId + * @param MatchETag - Etag to match + * @param NoMatchETag - Etag to exclude + * @param MatchModifiedSince - Modified Date of the object/part. 
UTC Date in string format + * @param MatchUnmodifiedSince - Modified Date of the object/part to exclude UTC Date in string format + * @param MatchRange - true or false Object range to match * @param Start * @param End * @param Encryption @@ -535,7 +548,19 @@ export class CopySourceOptions { MatchRange = false, Start = 0, End = 0, - Encryption = {}, + Encryption = undefined, + }: { + Bucket?: string + Object?: string + VersionID?: string + MatchETag?: string + NoMatchETag?: string + MatchModifiedSince?: string | null + MatchUnmodifiedSince?: string | null + MatchRange?: boolean + Start?: number + End?: number + Encryption?: Encryption } = {}) { this.Bucket = Bucket this.Object = Object @@ -569,24 +594,24 @@ export class CopySourceOptions { } getHeaders() { - let headerOptions = {} + const headerOptions: Header = {} headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) - if (!_.isEmpty(this.VersionID)) { + if (!isEmpty(this.VersionID)) { headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) + '?versionId=' + this.VersionID } - if (!_.isEmpty(this.MatchETag)) { + if (!isEmpty(this.MatchETag)) { headerOptions['x-amz-copy-source-if-match'] = this.MatchETag } - if (!_.isEmpty(this.NoMatchETag)) { + if (!isEmpty(this.NoMatchETag)) { headerOptions['x-amz-copy-source-if-none-match'] = this.NoMatchETag } - if (!_.isEmpty(this.MatchModifiedSince)) { + if (!isEmpty(this.MatchModifiedSince)) { headerOptions['x-amz-copy-source-if-modified-since'] = this.MatchModifiedSince } - if (!_.isEmpty(this.MatchUnmodifiedSince)) { + if (!isEmpty(this.MatchUnmodifiedSince)) { headerOptions['x-amz-copy-source-if-unmodified-since'] = this.MatchUnmodifiedSince } @@ -594,30 +619,54 @@ export class CopySourceOptions { } } +export type Encryption = { + type: string + SSEAlgorithm?: string + KMSMasterKeyID?: string +} + export class CopyDestinationOptions { - /* - * @param Bucket __string__ - * @param Object __string__ Object Name for the 
destination (composed/copied) object defaults - * @param Encryption __object__ Encryption configuration defaults to {} - * @param UserMetadata __object__ - * @param UserTags __object__ | __string__ - * @param LegalHold __string__ ON | OFF - * @param RetainUntilDate __string__ UTC Date String + public readonly Bucket: string + public readonly Object: string + private readonly Encryption?: Encryption + private readonly UserMetadata?: MetaData + private readonly UserTags?: Record | string + private readonly LegalHold?: 'on' | 'off' + private readonly RetainUntilDate?: string + private readonly Mode?: Mode + + /** + * @param Bucket - Bucket name + * @param Object - Object Name for the destination (composed/copied) object defaults + * @param Encryption - Encryption configuration defaults to {} + * @param UserMetadata - + * @param UserTags + * @param LegalHold - + * @param RetainUntilDate - UTC Date String * @param Mode */ constructor({ - Bucket = '', - Object = '', - Encryption = null, - UserMetadata = null, - UserTags = null, - LegalHold = null, - RetainUntilDate = null, - Mode = null, // + Bucket, + Object, + Encryption, + UserMetadata, + UserTags, + LegalHold, + RetainUntilDate, + Mode, + }: { + Bucket: string + Object: string + Encryption?: Encryption + UserMetadata?: MetaData + UserTags?: Record | string + LegalHold?: 'on' | 'off' + RetainUntilDate?: string + Mode?: Mode }) { this.Bucket = Bucket this.Object = Object - this.Encryption = Encryption + this.Encryption = Encryption ?? 
undefined // null input will become undefined, easy for runtime assert this.UserMetadata = UserMetadata this.UserTags = UserTags this.LegalHold = LegalHold @@ -625,47 +674,43 @@ export class CopyDestinationOptions { this.RetainUntilDate = RetainUntilDate } - getHeaders() { + getHeaders(): Record { const replaceDirective = 'REPLACE' - const headerOptions = {} + const headerOptions: Record = {} const userTags = this.UserTags - if (!_.isEmpty(userTags)) { + if (!isEmpty(userTags)) { headerOptions['X-Amz-Tagging-Directive'] = replaceDirective - headerOptions['X-Amz-Tagging'] = isObject(userTags) - ? querystring.stringify(userTags) - : isString(userTags) - ? userTags - : '' + headerOptions['X-Amz-Tagging'] = isObject(userTags) ? qs(userTags) : isString(userTags) ? userTags : '' } - if (!_.isEmpty(this.Mode)) { + if (this.Mode) { headerOptions['X-Amz-Object-Lock-Mode'] = this.Mode // GOVERNANCE or COMPLIANCE } - if (!_.isEmpty(this.RetainUntilDate)) { + if (this.RetainUntilDate) { headerOptions['X-Amz-Object-Lock-Retain-Until-Date'] = this.RetainUntilDate // needs to be UTC. 
} - if (!_.isEmpty(this.LegalHold)) { + if (this.LegalHold) { headerOptions['X-Amz-Object-Lock-Legal-Hold'] = this.LegalHold // ON or OFF } - if (!_.isEmpty(this.UserMetadata)) { - const headerKeys = Object.keys(this.UserMetadata) - headerKeys.forEach((key) => { - headerOptions[`X-Amz-Meta-${key}`] = this.UserMetadata[key] - }) + if (this.UserMetadata) { + for (const [key, value] of Object.entries(this.UserMetadata)) { + headerOptions[`X-Amz-Meta-${key}`] = value.toString() + } } - if (!_.isEmpty(this.Encryption)) { + if (this.Encryption) { const encryptionHeaders = getEncryptionHeaders(this.Encryption) - Object.keys(encryptionHeaders).forEach((key) => { - headerOptions[key] = encryptionHeaders[key] - }) + for (const [key, value] of Object.entries(encryptionHeaders)) { + headerOptions[key] = value + } } return headerOptions } + validate() { if (!isValidBucketName(this.Bucket)) { throw new errors.InvalidBucketNameError('Invalid Destination bucket name: ' + this.Bucket) @@ -673,25 +718,25 @@ export class CopyDestinationOptions { if (!isValidObjectName(this.Object)) { throw new errors.InvalidObjectNameError(`Invalid Destination object name: ${this.Object}`) } - if (!_.isEmpty(this.UserMetadata) && !isObject(this.UserMetadata)) { + if (!isEmpty(this.UserMetadata) && !isObject(this.UserMetadata)) { throw new errors.InvalidObjectNameError(`Destination UserMetadata should be an object with key value pairs`) } - if (!_.isEmpty(this.Mode) && ![RETENTION_MODES.GOVERNANCE, RETENTION_MODES.COMPLIANCE].includes(this.Mode)) { + if (!isEmpty(this.Mode) && ![RETENTION_MODES.GOVERNANCE, RETENTION_MODES.COMPLIANCE].includes(this.Mode)) { throw new errors.InvalidObjectNameError( `Invalid Mode specified for destination object it should be one of [GOVERNANCE,COMPLIANCE]`, ) } - if (!_.isEmpty(this.Encryption) && _.isEmpty(this.Encryption)) { + if (this.Encryption !== undefined && isEmptyObject(this.Encryption)) { throw new errors.InvalidObjectNameError(`Invalid Encryption configuration 
for destination object `) } return true } } -export const partsRequired = (size) => { - let maxPartSize = PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE / (PART_CONSTRAINTS.MAX_PARTS_COUNT - 1) +export function partsRequired(size: number): number { + const maxPartSize = PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE / (PART_CONSTRAINTS.MAX_PARTS_COUNT - 1) let requiredPartSize = size / maxPartSize if (size % maxPartSize > 0) { requiredPartSize++ @@ -704,19 +749,16 @@ export const partsRequired = (size) => { // start and end index slices. Splits happen evenly to be sure that no // part is less than 5MiB, as that could fail the multipart request if // it is not the last part. - -let startIndexParts = [] -let endIndexParts = [] -export function calculateEvenSplits(size, objInfo) { +export function calculateEvenSplits(size: number, objInfo: { Start?: unknown; Bucket: string; Object: string }) { if (size === 0) { return null } const reqParts = partsRequired(size) - startIndexParts = new Array(reqParts) - endIndexParts = new Array(reqParts) + const startIndexParts = new Array(reqParts) + const endIndexParts = new Array(reqParts) - let start = objInfo.Start - if (_.isEmpty(objInfo.Start) || start === -1) { + let start = objInfo.Start as number + if (isEmpty(objInfo.Start) || start === -1) { start = 0 } const divisorValue = Math.trunc(size / reqParts) @@ -732,7 +774,7 @@ export function calculateEvenSplits(size, objInfo) { } const currentStart = nextStart - let currentEnd = currentStart + curPartSize - 1 + const currentEnd = currentStart + curPartSize - 1 nextStart = currentEnd + 1 startIndexParts[i] = currentStart @@ -742,31 +784,31 @@ export function calculateEvenSplits(size, objInfo) { return { startIndex: startIndexParts, endIndex: endIndexParts, objInfo: objInfo } } -export function removeDirAndFiles(dirPath, removeSelf) { - if (removeSelf === undefined) { - removeSelf = true - } +export function removeDirAndFiles(dirPath: string, removeSelf = true) { + let files try 
{ - var files = fs.readdirSync(dirPath) + files = fs.readdirSync(dirPath) } catch (e) { return } - if (files.length > 0) { - for (var i = 0; i < files.length; i++) { - var filePath = path.join(dirPath, files[i]) - if (fs.statSync(filePath).isFile()) { - fs.unlinkSync(filePath) - } else { - removeDirAndFiles(filePath) - } + + for (const item of files) { + const filePath = path.join(dirPath, item) + if (fs.statSync(filePath).isFile()) { + fs.unlinkSync(filePath) + } else { + removeDirAndFiles(filePath, true) } } + if (removeSelf) { fs.rmdirSync(dirPath) } } -export const parseXml = (xml) => { +const fxp = new XMLParser() + +export function parseXml(xml: string): any { let result = null result = fxp.parse(xml) if (result.Error) { @@ -776,12 +818,23 @@ export const parseXml = (xml) => { return result } +// maybe this should be a generic type for Records, leave it for later refactor export class SelectResults { + private records?: unknown + private response?: unknown + private stats?: string + private progress?: unknown + constructor({ records, // parsed data as stream response, // original response stream stats, // stats as xml progress, // stats as xml + }: { + records?: unknown + response?: unknown + stats?: string + progress?: unknown }) { this.records = records this.response = response @@ -789,32 +842,35 @@ export class SelectResults { this.progress = progress } - setStats(stats) { + setStats(stats: string) { this.stats = stats } + getStats() { return this.stats } - setProgress(progress) { + setProgress(progress: unknown) { this.progress = progress } + getProgress() { return this.progress } - setResponse(response) { + setResponse(response: unknown) { this.response = response } + getResponse() { return this.response } - setRecords(records) { + setRecords(records: unknown) { this.records = records } - getRecords() { + getRecords(): unknown { return this.records } } diff --git a/src/minio.js b/src/minio.js deleted file mode 100644 index 2567e254..00000000 --- 
a/src/minio.js +++ /dev/null @@ -1,3987 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import * as fs from 'node:fs' -import * as Http from 'node:http' -import * as Https from 'node:https' -import * as path from 'node:path' -import * as Stream from 'node:stream' - -import async from 'async' -import BlockStream2 from 'block-stream2' -import _ from 'lodash' -import mkdirp from 'mkdirp' -import * as querystring from 'query-string' -import { TextEncoder } from 'web-encoding' -import Xml from 'xml' -import xml2js from 'xml2js' - -import { CredentialProvider } from './CredentialProvider.js' -import * as errors from './errors.ts' -import { extensions } from './extensions.js' -import { - calculateEvenSplits, - CopyDestinationOptions, - CopySourceOptions, - DEFAULT_REGION, - extractMetadata, - getScope, - getSourceVersionId, - getVersionId, - insertContentType, - isAmazonEndpoint, - isArray, - isBoolean, - isFunction, - isNumber, - isObject, - isReadableStream, - isString, - isValidBucketName, - isValidDate, - isValidEndpoint, - isValidObjectName, - isValidPort, - isValidPrefix, - isVirtualHostStyle, - LEGAL_HOLD_STATUS, - makeDateLong, - PART_CONSTRAINTS, - partsRequired, - pipesetup, - prependXAMZMeta, - promisify, - readableStream, - RETENTION_MODES, - RETENTION_VALIDITY_UNITS, - sanitizeETag, - toMd5, - toSha256, - uriEscape, - uriResourceEscape, -} 
from './helpers.js' -import { NotificationConfig, NotificationPoller } from './notification.js' -import { ObjectUploader } from './object-uploader.js' -import { getS3Endpoint } from './s3-endpoints.js' -import { postPresignSignatureV4, presignSignatureV4, signV4 } from './signing.js' -import * as transformers from './transformers.js' -import { parseSelectObjectContentResponse } from './xml-parsers.js' - -// will be replaced by bundler -const Package = { version: process.env.MINIO_JS_PACKAGE_VERSION || 'development' } - -export * from './helpers.js' -export * from './notification.js' - -export class Client { - constructor(params) { - if (typeof params.secure !== 'undefined') { - throw new Error('"secure" option deprecated, "useSSL" should be used instead') - } - // Default values if not specified. - if (typeof params.useSSL === 'undefined') { - params.useSSL = true - } - if (!params.port) { - params.port = 0 - } - // Validate input params. - if (!isValidEndpoint(params.endPoint)) { - throw new errors.InvalidEndpointError(`Invalid endPoint : ${params.endPoint}`) - } - if (!isValidPort(params.port)) { - throw new errors.InvalidArgumentError(`Invalid port : ${params.port}`) - } - if (!isBoolean(params.useSSL)) { - throw new errors.InvalidArgumentError( - `Invalid useSSL flag type : ${params.useSSL}, expected to be of type "boolean"`, - ) - } - - // Validate region only if its set. - if (params.region) { - if (!isString(params.region)) { - throw new errors.InvalidArgumentError(`Invalid region : ${params.region}`) - } - } - - var host = params.endPoint.toLowerCase() - var port = params.port - var protocol = '' - var transport - var transportAgent - // Validate if configuration is not using SSL - // for constructing relevant endpoints. - if (params.useSSL === false) { - transport = Http - protocol = 'http:' - if (port === 0) { - port = 80 - } - transportAgent = Http.globalAgent - } else { - // Defaults to secure. 
- transport = Https - protocol = 'https:' - if (port === 0) { - port = 443 - } - transportAgent = Https.globalAgent - } - - // if custom transport is set, use it. - if (params.transport) { - if (!isObject(params.transport)) { - throw new errors.InvalidArgumentError( - `Invalid transport type : ${params.transport}, expected to be type "object"`, - ) - } - transport = params.transport - } - - // if custom transport agent is set, use it. - if (params.transportAgent) { - if (!isObject(params.transportAgent)) { - throw new errors.InvalidArgumentError( - `Invalid transportAgent type: ${params.transportAgent}, expected to be type "object"`, - ) - } - - transportAgent = params.transportAgent - } - - // User Agent should always following the below style. - // Please open an issue to discuss any new changes here. - // - // MinIO (OS; ARCH) LIB/VER APP/VER - // - var libraryComments = `(${process.platform}; ${process.arch})` - var libraryAgent = `MinIO ${libraryComments} minio-js/${Package.version}` - // User agent block ends. 
- - this.transport = transport - this.transportAgent = transportAgent - this.host = host - this.port = port - this.protocol = protocol - this.accessKey = params.accessKey - this.secretKey = params.secretKey - this.sessionToken = params.sessionToken - this.userAgent = `${libraryAgent}` - - // Default path style is true - if (params.pathStyle === undefined) { - this.pathStyle = true - } else { - this.pathStyle = params.pathStyle - } - - if (!this.accessKey) { - this.accessKey = '' - } - if (!this.secretKey) { - this.secretKey = '' - } - this.anonymous = !this.accessKey || !this.secretKey - - if (params.credentialsProvider) { - this.credentialsProvider = params.credentialsProvider - this.checkAndRefreshCreds() - } - - this.regionMap = {} - if (params.region) { - this.region = params.region - } - - this.partSize = 64 * 1024 * 1024 - if (params.partSize) { - this.partSize = params.partSize - this.overRidePartSize = true - } - if (this.partSize < 5 * 1024 * 1024) { - throw new errors.InvalidArgumentError(`Part size should be greater than 5MB`) - } - if (this.partSize > 5 * 1024 * 1024 * 1024) { - throw new errors.InvalidArgumentError(`Part size should be less than 5GB`) - } - - this.maximumPartSize = 5 * 1024 * 1024 * 1024 - this.maxObjectSize = 5 * 1024 * 1024 * 1024 * 1024 - // SHA256 is enabled only for authenticated http requests. If the request is authenticated - // and the connection is https we use x-amz-content-sha256=UNSIGNED-PAYLOAD - // header for signature calculation. - this.enableSHA256 = !this.anonymous && !params.useSSL - - this.s3AccelerateEndpoint = params.s3AccelerateEndpoint || null - this.reqOptions = {} - } - - // This is s3 Specific and does not hold validity in any other Object storage. 
- getAccelerateEndPointIfSet(bucketName, objectName) { - if (!_.isEmpty(this.s3AccelerateEndpoint) && !_.isEmpty(bucketName) && !_.isEmpty(objectName)) { - // http://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration.html - // Disable transfer acceleration for non-compliant bucket names. - if (bucketName.indexOf('.') !== -1) { - throw new Error(`Transfer Acceleration is not supported for non compliant bucket:${bucketName}`) - } - // If transfer acceleration is requested set new host. - // For more details about enabling transfer acceleration read here. - // http://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration.html - return this.s3AccelerateEndpoint - } - return false - } - - /** - * @param endPoint _string_ valid S3 acceleration end point - */ - setS3TransferAccelerate(endPoint) { - this.s3AccelerateEndpoint = endPoint - } - - // Sets the supported request options. - setRequestOptions(options) { - if (!isObject(options)) { - throw new TypeError('request options should be of type "object"') - } - this.reqOptions = _.pick(options, [ - 'agent', - 'ca', - 'cert', - 'ciphers', - 'clientCertEngine', - 'crl', - 'dhparam', - 'ecdhCurve', - 'family', - 'honorCipherOrder', - 'key', - 'passphrase', - 'pfx', - 'rejectUnauthorized', - 'secureOptions', - 'secureProtocol', - 'servername', - 'sessionIdContext', - ]) - } - - // returns *options* object that can be used with http.request() - // Takes care of constructing virtual-host-style or path-style hostname - getRequestOptions(opts) { - var method = opts.method - var region = opts.region - var bucketName = opts.bucketName - var objectName = opts.objectName - var headers = opts.headers - var query = opts.query - - var reqOptions = { method } - reqOptions.headers = {} - - // If custom transportAgent was supplied earlier, we'll inject it here - reqOptions.agent = this.transportAgent - - // Verify if virtual host supported. 
- var virtualHostStyle - if (bucketName) { - virtualHostStyle = isVirtualHostStyle(this.host, this.protocol, bucketName, this.pathStyle) - } - - if (this.port) { - reqOptions.port = this.port - } - reqOptions.protocol = this.protocol - - if (objectName) { - objectName = `${uriResourceEscape(objectName)}` - } - - reqOptions.path = '/' - - // Save host. - reqOptions.host = this.host - // For Amazon S3 endpoint, get endpoint based on region. - if (isAmazonEndpoint(reqOptions.host)) { - const accelerateEndPoint = this.getAccelerateEndPointIfSet(bucketName, objectName) - if (accelerateEndPoint) { - reqOptions.host = `${accelerateEndPoint}` - } else { - reqOptions.host = getS3Endpoint(region) - } - } - - if (virtualHostStyle && !opts.pathStyle) { - // For all hosts which support virtual host style, `bucketName` - // is part of the hostname in the following format: - // - // var host = 'bucketName.example.com' - // - if (bucketName) { - reqOptions.host = `${bucketName}.${reqOptions.host}` - } - if (objectName) { - reqOptions.path = `/${objectName}` - } - } else { - // For all S3 compatible storage services we will fallback to - // path style requests, where `bucketName` is part of the URI - // path. 
- if (bucketName) { - reqOptions.path = `/${bucketName}` - } - if (objectName) { - reqOptions.path = `/${bucketName}/${objectName}` - } - } - - if (query) { - reqOptions.path += `?${query}` - } - reqOptions.headers.host = reqOptions.host - if ( - (reqOptions.protocol === 'http:' && reqOptions.port !== 80) || - (reqOptions.protocol === 'https:' && reqOptions.port !== 443) - ) { - reqOptions.headers.host = `${reqOptions.host}:${reqOptions.port}` - } - reqOptions.headers['user-agent'] = this.userAgent - if (headers) { - // have all header keys in lower case - to make signing easy - _.map(headers, (v, k) => (reqOptions.headers[k.toLowerCase()] = v)) - } - - // Use any request option specified in minioClient.setRequestOptions() - reqOptions = Object.assign({}, this.reqOptions, reqOptions) - - return reqOptions - } - - // Set application specific information. - // - // Generates User-Agent in the following style. - // - // MinIO (OS; ARCH) LIB/VER APP/VER - // - // __Arguments__ - // * `appName` _string_ - Application name. - // * `appVersion` _string_ - Application version. - setAppInfo(appName, appVersion) { - if (!isString(appName)) { - throw new TypeError(`Invalid appName: ${appName}`) - } - if (appName.trim() === '') { - throw new errors.InvalidArgumentError('Input appName cannot be empty.') - } - if (!isString(appVersion)) { - throw new TypeError(`Invalid appVersion: ${appVersion}`) - } - if (appVersion.trim() === '') { - throw new errors.InvalidArgumentError('Input appVersion cannot be empty.') - } - this.userAgent = `${this.userAgent} ${appName}/${appVersion}` - } - - // Calculate part size given the object size. 
Part size will be atleast this.partSize - calculatePartSize(size) { - if (!isNumber(size)) { - throw new TypeError('size should be of type "number"') - } - if (size > this.maxObjectSize) { - throw new TypeError(`size should not be more than ${this.maxObjectSize}`) - } - if (this.overRidePartSize) { - return this.partSize - } - var partSize = this.partSize - for (;;) { - // while(true) {...} throws linting error. - // If partSize is big enough to accomodate the object size, then use it. - if (partSize * 10000 > size) { - return partSize - } - // Try part sizes as 64MB, 80MB, 96MB etc. - partSize += 16 * 1024 * 1024 - } - } - - // log the request, response, error - logHTTP(reqOptions, response, err) { - // if no logstreamer available return. - if (!this.logStream) { - return - } - if (!isObject(reqOptions)) { - throw new TypeError('reqOptions should be of type "object"') - } - if (response && !isReadableStream(response)) { - throw new TypeError('response should be of type "Stream"') - } - if (err && !(err instanceof Error)) { - throw new TypeError('err should be of type "Error"') - } - var logHeaders = (headers) => { - _.forEach(headers, (v, k) => { - if (k == 'authorization') { - var redacter = new RegExp('Signature=([0-9a-f]+)') - v = v.replace(redacter, 'Signature=**REDACTED**') - } - this.logStream.write(`${k}: ${v}\n`) - }) - this.logStream.write('\n') - } - this.logStream.write(`REQUEST: ${reqOptions.method} ${reqOptions.path}\n`) - logHeaders(reqOptions.headers) - if (response) { - this.logStream.write(`RESPONSE: ${response.statusCode}\n`) - logHeaders(response.headers) - } - if (err) { - this.logStream.write('ERROR BODY:\n') - var errJSON = JSON.stringify(err, null, '\t') - this.logStream.write(`${errJSON}\n`) - } - } - - // Enable tracing - traceOn(stream) { - if (!stream) { - stream = process.stdout - } - this.logStream = stream - } - - // Disable tracing - traceOff() { - this.logStream = null - } - - // makeRequest is the primitive used by the apis for 
making S3 requests. - // payload can be empty string in case of no payload. - // statusCode is the expected statusCode. If response.statusCode does not match - // we parse the XML error and call the callback with the error message. - // A valid region is passed by the calls - listBuckets, makeBucket and - // getBucketRegion. - makeRequest(options, payload, statusCodes, region, returnResponse, cb) { - if (!isObject(options)) { - throw new TypeError('options should be of type "object"') - } - if (!isString(payload) && !isObject(payload)) { - // Buffer is of type 'object' - throw new TypeError('payload should be of type "string" or "Buffer"') - } - statusCodes.forEach((statusCode) => { - if (!isNumber(statusCode)) { - throw new TypeError('statusCode should be of type "number"') - } - }) - if (!isString(region)) { - throw new TypeError('region should be of type "string"') - } - if (!isBoolean(returnResponse)) { - throw new TypeError('returnResponse should be of type "boolean"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - if (!options.headers) { - options.headers = {} - } - if (options.method === 'POST' || options.method === 'PUT' || options.method === 'DELETE') { - options.headers['content-length'] = payload.length - } - var sha256sum = '' - if (this.enableSHA256) { - sha256sum = toSha256(payload) - } - var stream = readableStream(payload) - this.makeRequestStream(options, stream, sha256sum, statusCodes, region, returnResponse, cb) - } - - // makeRequestStream will be used directly instead of makeRequest in case the payload - // is available as a stream. for ex. 
putObject - makeRequestStream(options, stream, sha256sum, statusCodes, region, returnResponse, cb) { - if (!isObject(options)) { - throw new TypeError('options should be of type "object"') - } - if (!isReadableStream(stream)) { - throw new errors.InvalidArgumentError('stream should be a readable Stream') - } - if (!isString(sha256sum)) { - throw new TypeError('sha256sum should be of type "string"') - } - statusCodes.forEach((statusCode) => { - if (!isNumber(statusCode)) { - throw new TypeError('statusCode should be of type "number"') - } - }) - if (!isString(region)) { - throw new TypeError('region should be of type "string"') - } - if (!isBoolean(returnResponse)) { - throw new TypeError('returnResponse should be of type "boolean"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - // sha256sum will be empty for anonymous or https requests - if (!this.enableSHA256 && sha256sum.length !== 0) { - throw new errors.InvalidArgumentError(`sha256sum expected to be empty for anonymous or https requests`) - } - // sha256sum should be valid for non-anonymous http requests. - if (this.enableSHA256 && sha256sum.length !== 64) { - throw new errors.InvalidArgumentError(`Invalid sha256sum : ${sha256sum}`) - } - - var _makeRequest = (e, region) => { - if (e) { - return cb(e) - } - options.region = region - var reqOptions = this.getRequestOptions(options) - if (!this.anonymous) { - // For non-anonymous https requests sha256sum is 'UNSIGNED-PAYLOAD' for signature calculation. 
- if (!this.enableSHA256) { - sha256sum = 'UNSIGNED-PAYLOAD' - } - - let date = new Date() - - reqOptions.headers['x-amz-date'] = makeDateLong(date) - reqOptions.headers['x-amz-content-sha256'] = sha256sum - if (this.sessionToken) { - reqOptions.headers['x-amz-security-token'] = this.sessionToken - } - - this.checkAndRefreshCreds() - var authorization = signV4(reqOptions, this.accessKey, this.secretKey, region, date) - reqOptions.headers.authorization = authorization - } - var req = this.transport.request(reqOptions, (response) => { - if (!statusCodes.includes(response.statusCode)) { - // For an incorrect region, S3 server always sends back 400. - // But we will do cache invalidation for all errors so that, - // in future, if AWS S3 decides to send a different status code or - // XML error code we will still work fine. - delete this.regionMap[options.bucketName] - var errorTransformer = transformers.getErrorTransformer(response) - pipesetup(response, errorTransformer).on('error', (e) => { - this.logHTTP(reqOptions, response, e) - cb(e) - }) - return - } - this.logHTTP(reqOptions, response) - if (returnResponse) { - return cb(null, response) - } - // We drain the socket so that the connection gets closed. Note that this - // is not expensive as the socket will not have any data. - response.on('data', () => {}) - cb(null) - }) - let pipe = pipesetup(stream, req) - pipe.on('error', (e) => { - this.logHTTP(reqOptions, null, e) - cb(e) - }) - } - if (region) { - return _makeRequest(null, region) - } - this.getBucketRegion(options.bucketName, _makeRequest) - } - - // gets the region of the bucket - getBucketRegion(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError(`Invalid bucket name : ${bucketName}`) - } - if (!isFunction(cb)) { - throw new TypeError('cb should be of type "function"') - } - - // Region is set with constructor, return the region right here. 
- if (this.region) { - return cb(null, this.region) - } - - if (this.regionMap[bucketName]) { - return cb(null, this.regionMap[bucketName]) - } - var extractRegion = (response) => { - var transformer = transformers.getBucketRegionTransformer() - var region = DEFAULT_REGION - pipesetup(response, transformer) - .on('error', cb) - .on('data', (data) => { - if (data) { - region = data - } - }) - .on('end', () => { - this.regionMap[bucketName] = region - cb(null, region) - }) - } - - var method = 'GET' - var query = 'location' - - // `getBucketLocation` behaves differently in following ways for - // different environments. - // - // - For nodejs env we default to path style requests. - // - For browser env path style requests on buckets yields CORS - // error. To circumvent this problem we make a virtual host - // style request signed with 'us-east-1'. This request fails - // with an error 'AuthorizationHeaderMalformed', additionally - // the error XML also provides Region of the bucket. To validate - // this region is proper we retry the same request with the newly - // obtained region. - var pathStyle = this.pathStyle && typeof window === 'undefined' - - this.makeRequest({ method, bucketName, query, pathStyle }, '', [200], DEFAULT_REGION, true, (e, response) => { - if (e) { - if (e.name === 'AuthorizationHeaderMalformed') { - var region = e.Region - if (!region) { - return cb(e) - } - this.makeRequest({ method, bucketName, query }, '', [200], region, true, (e, response) => { - if (e) { - return cb(e) - } - extractRegion(response) - }) - return - } - return cb(e) - } - extractRegion(response) - }) - } - - // Creates the bucket `bucketName`. - // - // __Arguments__ - // * `bucketName` _string_ - Name of the bucket - // * `region` _string_ - region valid values are _us-west-1_, _us-west-2_, _eu-west-1_, _eu-central-1_, _ap-southeast-1_, _ap-northeast-1_, _ap-southeast-2_, _sa-east-1_. - // * `makeOpts` _object_ - Options to create a bucket. 
e.g {ObjectLocking:true} (Optional) - // * `callback(err)` _function_ - callback function with `err` as the error argument. `err` is null if the bucket is successfully created. - makeBucket(bucketName, region, makeOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - // Backward Compatibility - if (isObject(region)) { - cb = makeOpts - makeOpts = region - region = '' - } - if (isFunction(region)) { - cb = region - region = '' - makeOpts = {} - } - if (isFunction(makeOpts)) { - cb = makeOpts - makeOpts = {} - } - - if (!isString(region)) { - throw new TypeError('region should be of type "string"') - } - if (!isObject(makeOpts)) { - throw new TypeError('makeOpts should be of type "object"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - var payload = '' - - // Region already set in constructor, validate if - // caller requested bucket location is same. - if (region && this.region) { - if (region !== this.region) { - throw new errors.InvalidArgumentError(`Configured region ${this.region}, requested ${region}`) - } - } - // sending makeBucket request with XML containing 'us-east-1' fails. 
For - // default region server expects the request without body - if (region && region !== DEFAULT_REGION) { - var createBucketConfiguration = [] - createBucketConfiguration.push({ - _attr: { - xmlns: 'http://s3.amazonaws.com/doc/2006-03-01/', - }, - }) - createBucketConfiguration.push({ - LocationConstraint: region, - }) - var payloadObject = { - CreateBucketConfiguration: createBucketConfiguration, - } - payload = Xml(payloadObject) - } - var method = 'PUT' - var headers = {} - - if (makeOpts.ObjectLocking) { - headers['x-amz-bucket-object-lock-enabled'] = true - } - - if (!region) { - region = DEFAULT_REGION - } - - const processWithRetry = (err) => { - if (err && (region === '' || region === DEFAULT_REGION)) { - if (err.code === 'AuthorizationHeaderMalformed' && err.region !== '') { - // Retry with region returned as part of error - this.makeRequest({ method, bucketName, headers }, payload, [200], err.region, false, cb) - } else { - return cb && cb(err) - } - } - return cb && cb(err) - } - this.makeRequest({ method, bucketName, headers }, payload, [200], region, false, processWithRetry) - } - - // List of buckets created. - // - // __Arguments__ - // * `callback(err, buckets)` _function_ - callback function with error as the first argument. 
  // `buckets` is an array of bucket information
  //
  // `buckets` array element:
  // * `bucket.name` _string_ : bucket name
  // * `bucket.creationDate` _Date_: date when bucket was created
  listBuckets(cb) {
    if (!isFunction(cb)) {
      throw new TypeError('callback should be of type "function"')
    }
    var method = 'GET'
    // Bucket listing is an account-level request; it is always sent to the
    // default region (last argument `true` returns the response stream).
    this.makeRequest({ method }, '', [200], DEFAULT_REGION, true, (e, response) => {
      if (e) {
        return cb(e)
      }
      var transformer = transformers.getListBucketTransformer()
      var buckets
      // The transformer emits the parsed bucket list via 'data'; the last
      // payload seen is what we hand to the callback on 'end'.
      pipesetup(response, transformer)
        .on('data', (result) => (buckets = result))
        .on('error', (e) => cb(e))
        .on('end', () => cb(null, buckets))
    })
  }

  // Returns a stream that emits objects that are partially uploaded.
  //
  // __Arguments__
  // * `bucketName` _string_: name of the bucket
  // * `prefix` _string_: prefix of the object names that are partially uploaded (optional, default `''`)
  // * `recursive` _bool_: directory style listing when false, recursive listing when true (optional, default `false`)
  //
  // __Return Value__
  // * `stream` _Stream_ : emits objects of the format:
  //   * `object.key` _string_: name of the object
  //   * `object.uploadId` _string_: upload ID of the object
  //   * `object.size` _Integer_: size of the partially uploaded object
  listIncompleteUploads(bucket, prefix, recursive) {
    if (prefix === undefined) {
      prefix = ''
    }
    if (recursive === undefined) {
      recursive = false
    }
    if (!isValidBucketName(bucket)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucket)
    }
    if (!isValidPrefix(prefix)) {
      throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`)
    }
    if (!isBoolean(recursive)) {
      throw new TypeError('recursive should be of type "boolean"')
    }
    // '' recurses into all keys; '/' produces a directory-style listing.
    var delimiter = recursive ? '' : '/'
    var keyMarker = ''
    var uploadIdMarker = ''
    var uploads = []
    var ended = false
    // Pull-based readable: each _read() either pushes a buffered upload,
    // ends the stream, or fetches the next page from the server.
    var readStream = Stream.Readable({ objectMode: true })
    readStream._read = () => {
      // push one upload info per _read()
      if (uploads.length) {
        return readStream.push(uploads.shift())
      }
      if (ended) {
        return readStream.push(null)
      }
      this.listIncompleteUploadsQuery(bucket, prefix, keyMarker, uploadIdMarker, delimiter)
        .on('error', (e) => readStream.emit('error', e))
        .on('data', (result) => {
          result.prefixes.forEach((prefix) => uploads.push(prefix))
          async.eachSeries(
            result.uploads,
            (upload, cb) => {
              // for each incomplete upload add the sizes of its uploaded parts
              this.listParts(bucket, upload.key, upload.uploadId, (err, parts) => {
                if (err) {
                  return cb(err)
                }
                upload.size = parts.reduce((acc, item) => acc + item.size, 0)
                uploads.push(upload)
                cb()
              })
            },
            (err) => {
              if (err) {
                readStream.emit('error', err)
                return
              }
              // Truncated result: remember both markers for the next page;
              // otherwise the next _read() will terminate the stream.
              if (result.isTruncated) {
                keyMarker = result.nextKeyMarker
                uploadIdMarker = result.nextUploadIdMarker
              } else {
                ended = true
              }
              readStream._read()
            },
          )
        })
    }
    return readStream
  }

  // To check if a bucket already exists.
  //
  // __Arguments__
  // * `bucketName` _string_ : name of the bucket
  // * `callback(err, exists)` _function_ : `err` is `null` if the request succeeded;
  //   existence is reported via the second argument (`true`/`false`).
  bucketExists(bucketName, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isFunction(cb)) {
      throw new TypeError('callback should be of type "function"')
    }
    var method = 'HEAD'
    this.makeRequest({ method, bucketName }, '', [200], '', false, (err) => {
      if (err) {
        // A missing bucket is not an error for this API — report `false`.
        if (err.code == 'NoSuchBucket' || err.code == 'NotFound') {
          return cb(null, false)
        }
        return cb(err)
      }
      cb(null, true)
    })
  }

  // Remove a bucket.
- // - // __Arguments__ - // * `bucketName` _string_ : name of the bucket - // * `callback(err)` _function_ : `err` is `null` if the bucket is removed successfully. - removeBucket(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - var method = 'DELETE' - this.makeRequest({ method, bucketName }, '', [204], '', false, (e) => { - // If the bucket was successfully removed, remove the region map entry. - if (!e) { - delete this.regionMap[bucketName] - } - cb(e) - }) - } - - // Remove the partially uploaded object. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `callback(err)` _function_: callback function is called with non `null` value in case of error - removeIncompleteUpload(bucketName, objectName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.IsValidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - var removeUploadId - async.during( - (cb) => { - this.findUploadId(bucketName, objectName, (e, uploadId) => { - if (e) { - return cb(e) - } - removeUploadId = uploadId - cb(null, uploadId) - }) - }, - (cb) => { - var method = 'DELETE' - var query = `uploadId=${removeUploadId}` - this.makeRequest({ method, bucketName, objectName, query }, '', [204], '', false, (e) => cb(e)) - }, - cb, - ) - } - - // Callback is called with `error` in case of error or `null` in case of success - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `filePath` _string_: path to which the object data will be written to 
  // * `getOpts` _object_: Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional)
  // * `callback(err)` _function_: callback is called with `err` in case of error.
  fGetObject(bucketName, objectName, filePath, getOpts = {}, cb) {
    // Input validation.
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isValidObjectName(objectName)) {
      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
    }
    if (!isString(filePath)) {
      throw new TypeError('filePath should be of type "string"')
    }
    // Backward Compatibility: fGetObject(bucket, object, filePath, cb)
    if (isFunction(getOpts)) {
      cb = getOpts
      getOpts = {}
    }

    if (!isFunction(cb)) {
      throw new TypeError('callback should be of type "function"')
    }

    // Internal data.
    var partFile // temp download path: `<filePath>.<etag>.part.minio`
    var partFileStream
    var objStat

    // Rename wrapper: final waterfall callback — on success, atomically move
    // the completed part file into place at filePath.
    var rename = (err) => {
      if (err) {
        return cb(err)
      }
      fs.rename(partFile, filePath, cb)
    }

    async.waterfall(
      [
        // 1. stat the remote object (size + etag drive the resume logic)
        (cb) => this.statObject(bucketName, objectName, getOpts, cb),
        (result, cb) => {
          objStat = result
          // Create any missing top level directories.
          mkdirp(path.dirname(filePath), cb)
        },
        // 2. if a part file for this etag already exists, resume from its
        //    size (append); otherwise start from offset 0 (truncate).
        (ignore, cb) => {
          partFile = `${filePath}.${objStat.etag}.part.minio`
          fs.stat(partFile, (e, stats) => {
            var offset = 0
            if (e) {
              partFileStream = fs.createWriteStream(partFile, { flags: 'w' })
            } else {
              if (objStat.size === stats.size) {
                // Already fully downloaded — skip straight to the rename.
                return rename()
              }
              offset = stats.size
              partFileStream = fs.createWriteStream(partFile, { flags: 'a' })
            }
            this.getPartialObject(bucketName, objectName, offset, 0, getOpts, cb)
          })
        },
        // 3. pipe the (partial) download into the part file
        (downloadStream, cb) => {
          pipesetup(downloadStream, partFileStream)
            .on('error', (e) => cb(e))
            .on('finish', cb)
        },
        // 4. verify the part file size matches the remote object size
        (cb) => fs.stat(partFile, cb),
        (stats, cb) => {
          if (stats.size === objStat.size) {
            return cb()
          }
          cb(new Error('Size mismatch between downloaded file and the object'))
        },
      ],
      rename,
    )
  }

  // Callback is called with readable stream of the object content.
  //
  // __Arguments__
  // * `bucketName` _string_: name of the bucket
  // * `objectName` _string_: name of the object
  // * `getOpts` _object_: Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional)
  // * `callback(err, stream)` _function_: callback is called with `err` in case of error. `stream` is the object content stream
  getObject(bucketName, objectName, getOpts = {}, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isValidObjectName(objectName)) {
      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
    }
    // Backward Compatibility: getObject(bucket, object, cb)
    if (isFunction(getOpts)) {
      cb = getOpts
      getOpts = {}
    }

    if (!isFunction(cb)) {
      throw new TypeError('callback should be of type "function"')
    }
    // Delegate to getPartialObject with offset=0, length=0 (whole object).
    this.getPartialObject(bucketName, objectName, 0, 0, getOpts, cb)
  }

  // Callback is called with readable stream of the partial object content.
- // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `offset` _number_: offset of the object from where the stream will start - // * `length` _number_: length of the object that will be read in the stream (optional, if not specified we read the rest of the file from the offset) - // * `getOpts` _object_: Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional) - // * `callback(err, stream)` _function_: callback is called with `err` in case of error. `stream` is the object content stream - getPartialObject(bucketName, objectName, offset, length, getOpts = {}, cb) { - if (isFunction(length)) { - cb = length - length = 0 - } - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isNumber(offset)) { - throw new TypeError('offset should be of type "number"') - } - if (!isNumber(length)) { - throw new TypeError('length should be of type "number"') - } - // Backward Compatibility - if (isFunction(getOpts)) { - cb = getOpts - getOpts = {} - } - - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - var range = '' - if (offset || length) { - if (offset) { - range = `bytes=${+offset}-` - } else { - range = 'bytes=0-' - offset = 0 - } - if (length) { - range += `${+length + offset - 1}` - } - } - - var headers = {} - if (range !== '') { - headers.range = range - } - - var expectedStatusCodes = [200] - if (range) { - expectedStatusCodes.push(206) - } - var method = 'GET' - - var query = querystring.stringify(getOpts) - this.makeRequest({ method, bucketName, objectName, headers, query }, '', expectedStatusCodes, '', true, cb) - } - - // Uploads the object using contents from a file - // - // __Arguments__ - // * `bucketName` _string_: 
  //   name of the bucket
  // * `objectName` _string_: name of the object
  // * `filePath` _string_: file path of the file to be uploaded
  // * `metaData` _Javascript Object_: metaData assosciated with the object
  // * `callback(err, objInfo)` _function_: non null `err` indicates error, `objInfo` _object_ which contains versionId and etag.
  fPutObject(bucketName, objectName, filePath, metaData, callback) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isValidObjectName(objectName)) {
      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
    }

    if (!isString(filePath)) {
      throw new TypeError('filePath should be of type "string"')
    }
    // Legacy signature: fPutObject(bucket, object, filePath, cb)
    if (isFunction(metaData)) {
      callback = metaData
      metaData = {} // Set metaData empty if no metaData provided.
    }
    if (!isObject(metaData)) {
      throw new TypeError('metaData should be of type "object"')
    }

    // Inserts correct `content-type` attribute based on metaData and filePath
    metaData = insertContentType(metaData, filePath)

    // Updates metaData to have the correct prefix if needed
    metaData = prependXAMZMeta(metaData)
    var size
    var partSize

    async.waterfall(
      [
        (cb) => fs.stat(filePath, cb),
        (stats, cb) => {
          size = stats.size
          var stream
          var cbTriggered = false
          var origCb = cb
          // Wrap the waterfall callback so it fires at most once and always
          // destroys any read stream we opened before passing control on.
          cb = function () {
            if (cbTriggered) {
              return
            }
            cbTriggered = true
            if (stream) {
              stream.destroy()
            }
            return origCb.apply(this, arguments)
          }
          if (size > this.maxObjectSize) {
            return cb(new Error(`${filePath} size : ${stats.size}, max allowed size : 5TB`))
          }
          if (size <= this.partSize) {
            // simple PUT request, no multipart
            var multipart = false
            var uploader = this.getUploader(bucketName, objectName, metaData, multipart)
            // First pass over the file computes the md5/sha256 sums…
            var hash = transformers.getHashSummer(this.enableSHA256)
            var start = 0
            var end = size - 1
            var autoClose = true
            if (size === 0) {
              end = 0
            }
            var options = { start, end, autoClose }
            pipesetup(fs.createReadStream(filePath, options), hash)
              .on('data', (data) => {
                var md5sum = data.md5sum
                var sha256sum = data.sha256sum
                // …second pass streams the file body to the server.
                stream = fs.createReadStream(filePath, options)
                uploader(stream, size, sha256sum, md5sum, (err, objInfo) => {
                  callback(err, objInfo)
                  // `true` is a sentinel error that short-circuits the rest of
                  // the waterfall — `callback` has already been invoked above.
                  cb(true)
                })
              })
              .on('error', (e) => cb(e))
            return
          }
          // Multipart path: look for a previous incomplete upload to resume.
          this.findUploadId(bucketName, objectName, cb)
        },
        (uploadId, cb) => {
          // if there was a previous incomplete upload, fetch all its uploaded parts info
          if (uploadId) {
            return this.listParts(bucketName, objectName, uploadId, (e, etags) => cb(e, uploadId, etags))
          }
          // there was no previous upload, initiate a new one
          this.initiateNewMultipartUpload(bucketName, objectName, metaData, (e, uploadId) => cb(e, uploadId, []))
        },
        (uploadId, etags, cb) => {
          partSize = this.calculatePartSize(size)
          var multipart = true
          var uploader = this.getUploader(bucketName, objectName, metaData, multipart)

          // convert array to object to make things easy
          var parts = etags.reduce(function (acc, item) {
            if (!acc[item.part]) {
              acc[item.part] = item
            }
            return acc
          }, {})
          var partsDone = []
          var partNumber = 1
          var uploadedSize = 0
          // Upload parts sequentially until the whole file is covered.
          async.whilst(
            (cb) => {
              cb(null, uploadedSize < size)
            },
            (cb) => {
              var stream
              var cbTriggered = false
              var origCb = cb
              // Same single-fire + stream-cleanup wrapper as the simple-PUT path.
              cb = function () {
                if (cbTriggered) {
                  return
                }
                cbTriggered = true
                if (stream) {
                  stream.destroy()
                }
                return origCb.apply(this, arguments)
              }
              var part = parts[partNumber]
              var hash = transformers.getHashSummer(this.enableSHA256)
              var length = partSize
              if (length > size - uploadedSize) {
                length = size - uploadedSize
              }
              var start = uploadedSize
              var end = uploadedSize + length - 1
              var autoClose = true
              var options = { autoClose, start, end }
              // verify md5sum of each part
              pipesetup(fs.createReadStream(filePath, options), hash)
                .on('data', (data) => {
                  var md5sumHex = Buffer.from(data.md5sum, 'base64').toString('hex')
                  if (part && md5sumHex === part.etag) {
                    // md5 matches, chunk already uploaded
                    partsDone.push({ part: partNumber, etag: part.etag })
                    partNumber++
                    uploadedSize += length
                    return cb()
                  }
                  // part is not uploaded yet, or md5 mismatch
                  stream = fs.createReadStream(filePath, options)
                  uploader(uploadId, partNumber, stream, length, data.sha256sum, data.md5sum, (e, objInfo) => {
                    if (e) {
                      return cb(e)
                    }
                    partsDone.push({ part: partNumber, etag: objInfo.etag })
                    partNumber++
                    uploadedSize += length
                    return cb()
                  })
                })
                .on('error', (e) => cb(e))
            },
            (e) => {
              if (e) {
                return cb(e)
              }
              cb(null, partsDone, uploadId)
            },
          )
        },
        // all parts uploaded, complete the multipart upload
        (etags, uploadId, cb) => this.completeMultipartUpload(bucketName, objectName, uploadId, etags, cb),
      ],
      (err, ...rest) => {
        // `err === true` is the simple-PUT sentinel; the user callback has
        // already fired, so swallow it here.
        if (err === true) {
          return
        }
        callback(err, ...rest)
      },
    )
  }

  // Uploads the object.
  //
  // Uploading a stream
  // __Arguments__
  // * `bucketName` _string_: name of the bucket
  // * `objectName` _string_: name of the object
  // * `stream` _Stream_: Readable stream
  // * `size` _number_: size of the object (optional)
  // * `callback(err, etag)` _function_: non null `err` indicates error, `etag` _string_ is the etag of the object uploaded.
- // - // Uploading "Buffer" or "string" - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `string or Buffer` _string_ or _Buffer_: string or buffer - // * `callback(err, objInfo)` _function_: `err` is `null` in case of success and `info` will have the following object details: - // * `etag` _string_: etag of the object - // * `versionId` _string_: versionId of the object - putObject(bucketName, objectName, stream, size, metaData, callback) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - - // We'll need to shift arguments to the left because of size and metaData. - if (isFunction(size)) { - callback = size - metaData = {} - } else if (isFunction(metaData)) { - callback = metaData - metaData = {} - } - - // We'll need to shift arguments to the left because of metaData - // and size being optional. - if (isObject(size)) { - metaData = size - } - - // Ensures Metadata has appropriate prefix for A3 API - metaData = prependXAMZMeta(metaData) - if (typeof stream === 'string' || stream instanceof Buffer) { - // Adapts the non-stream interface into a stream. - size = stream.length - stream = readableStream(stream) - } else if (!isReadableStream(stream)) { - throw new TypeError('third argument should be of type "stream.Readable" or "Buffer" or "string"') - } - - if (!isFunction(callback)) { - throw new TypeError('callback should be of type "function"') - } - - if (isNumber(size) && size < 0) { - throw new errors.InvalidArgumentError(`size cannot be negative, given size: ${size}`) - } - - // Get the part size and forward that to the BlockStream. Default to the - // largest block size possible if necessary. 
- if (!isNumber(size)) { - size = this.maxObjectSize - } - - size = this.calculatePartSize(size) - - // s3 requires that all non-end chunks be at least `this.partSize`, - // so we chunk the stream until we hit either that size or the end before - // we flush it to s3. - let chunker = new BlockStream2({ size, zeroPadding: false }) - - // This is a Writable stream that can be written to in order to upload - // to the specified bucket and object automatically. - let uploader = new ObjectUploader(this, bucketName, objectName, size, metaData, callback) - // stream => chunker => uploader - pipesetup(stream, chunker, uploader) - } - - // Copy the object. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `srcObject` _string_: path of the source object to be copied - // * `conditions` _CopyConditions_: copy conditions that needs to be satisfied (optional, default `null`) - // * `callback(err, {etag, lastModified})` _function_: non null `err` indicates error, `etag` _string_ and `listModifed` _Date_ are respectively the etag and the last modified date of the newly copied object - copyObjectV1(arg1, arg2, arg3, arg4, arg5) { - var bucketName = arg1 - var objectName = arg2 - var srcObject = arg3 - var conditions, cb - if (typeof arg4 == 'function' && arg5 === undefined) { - conditions = null - cb = arg4 - } else { - conditions = arg4 - cb = arg5 - } - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isString(srcObject)) { - throw new TypeError('srcObject should be of type "string"') - } - if (srcObject === '') { - throw new errors.InvalidPrefixError(`Empty source prefix`) - } - - if (conditions !== null && !(conditions instanceof CopyConditions)) { - throw new TypeError('conditions should be of type 
"CopyConditions"') - } - - var headers = {} - headers['x-amz-copy-source'] = uriResourceEscape(srcObject) - - if (conditions !== null) { - if (conditions.modified !== '') { - headers['x-amz-copy-source-if-modified-since'] = conditions.modified - } - if (conditions.unmodified !== '') { - headers['x-amz-copy-source-if-unmodified-since'] = conditions.unmodified - } - if (conditions.matchETag !== '') { - headers['x-amz-copy-source-if-match'] = conditions.matchETag - } - if (conditions.matchEtagExcept !== '') { - headers['x-amz-copy-source-if-none-match'] = conditions.matchETagExcept - } - } - - var method = 'PUT' - this.makeRequest({ method, bucketName, objectName, headers }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - var transformer = transformers.getCopyObjectTransformer() - pipesetup(response, transformer) - .on('error', (e) => cb(e)) - .on('data', (data) => cb(null, data)) - }) - } - - /** - * Internal Method to perform copy of an object. - * @param sourceConfig __object__ instance of CopySourceOptions @link ./helpers/CopySourceOptions - * @param destConfig __object__ instance of CopyDestinationOptions @link ./helpers/CopyDestinationOptions - * @param cb __function__ called with null if there is an error - * @returns Promise if no callack is passed. 
- */ - copyObjectV2(sourceConfig, destConfig, cb) { - if (!(sourceConfig instanceof CopySourceOptions)) { - throw new errors.InvalidArgumentError('sourceConfig should of type CopySourceOptions ') - } - if (!(destConfig instanceof CopyDestinationOptions)) { - throw new errors.InvalidArgumentError('destConfig should of type CopyDestinationOptions ') - } - if (!destConfig.validate()) { - return false - } - if (!destConfig.validate()) { - return false - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - const headers = Object.assign({}, sourceConfig.getHeaders(), destConfig.getHeaders()) - - const bucketName = destConfig.Bucket - const objectName = destConfig.Object - - const method = 'PUT' - this.makeRequest({ method, bucketName, objectName, headers }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - const transformer = transformers.getCopyObjectTransformer() - pipesetup(response, transformer) - .on('error', (e) => cb(e)) - .on('data', (data) => { - const resHeaders = response.headers - - const copyObjResponse = { - Bucket: destConfig.Bucket, - Key: destConfig.Object, - LastModified: data.LastModified, - MetaData: extractMetadata(resHeaders), - VersionId: getVersionId(resHeaders), - SourceVersionId: getSourceVersionId(resHeaders), - Etag: sanitizeETag(resHeaders.etag), - Size: +resHeaders['content-length'], - } - - return cb(null, copyObjResponse) - }) - }) - } - - // Backward compatibility for Copy Object API. 
- copyObject(...allArgs) { - if (allArgs[0] instanceof CopySourceOptions && allArgs[1] instanceof CopyDestinationOptions) { - return this.copyObjectV2(...arguments) - } - return this.copyObjectV1(...arguments) - } - - // list a batch of objects - listObjectsQuery(bucketName, prefix, marker, listQueryOpts = {}) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isString(prefix)) { - throw new TypeError('prefix should be of type "string"') - } - if (!isString(marker)) { - throw new TypeError('marker should be of type "string"') - } - let { Delimiter, MaxKeys, IncludeVersion } = listQueryOpts - - if (!isObject(listQueryOpts)) { - throw new TypeError('listQueryOpts should be of type "object"') - } - - if (!isString(Delimiter)) { - throw new TypeError('Delimiter should be of type "string"') - } - if (!isNumber(MaxKeys)) { - throw new TypeError('MaxKeys should be of type "number"') - } - - const queries = [] - // escape every value in query string, except maxKeys - queries.push(`prefix=${uriEscape(prefix)}`) - queries.push(`delimiter=${uriEscape(Delimiter)}`) - queries.push(`encoding-type=url`) - - if (IncludeVersion) { - queries.push(`versions`) - } - - if (marker) { - marker = uriEscape(marker) - if (IncludeVersion) { - queries.push(`key-marker=${marker}`) - } else { - queries.push(`marker=${marker}`) - } - } - - // no need to escape maxKeys - if (MaxKeys) { - if (MaxKeys >= 1000) { - MaxKeys = 1000 - } - queries.push(`max-keys=${MaxKeys}`) - } - queries.sort() - var query = '' - if (queries.length > 0) { - query = `${queries.join('&')}` - } - - var method = 'GET' - var transformer = transformers.getListObjectsTransformer() - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return transformer.emit('error', e) - } - pipesetup(response, transformer) - }) - return transformer - } - - // List the objects in the bucket. 
  //
  // __Arguments__
  // * `bucketName` _string_: name of the bucket
  // * `prefix` _string_: the prefix of the objects that should be listed (optional, default `''`)
  // * `recursive` _bool_: `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. (optional, default `false`)
  // * `listOpts _object_: query params to list object with below keys
  //   * listOpts.MaxKeys _int_ maximum number of keys to return
  //   * listOpts.IncludeVersion _bool_ true|false to include versions.
  // __Return Value__
  // * `stream` _Stream_: stream emitting the objects in the bucket, the object is of the format:
  //   * `obj.name` _string_: name of the object
  //   * `obj.prefix` _string_: name of the object prefix
  //   * `obj.size` _number_: size of the object
  //   * `obj.etag` _string_: etag of the object
  //   * `obj.lastModified` _Date_: modified time stamp
  //   * `obj.isDeleteMarker` _boolean_: true if it is a delete marker
  //   * `obj.versionId` _string_: versionId of the object
  listObjects(bucketName, prefix, recursive, listOpts = {}) {
    if (prefix === undefined) {
      prefix = ''
    }
    if (recursive === undefined) {
      recursive = false
    }
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isValidPrefix(prefix)) {
      throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`)
    }
    if (!isString(prefix)) {
      throw new TypeError('prefix should be of type "string"')
    }
    if (!isBoolean(recursive)) {
      throw new TypeError('recursive should be of type "boolean"')
    }
    if (!isObject(listOpts)) {
      throw new TypeError('listOpts should be of type "object"')
    }
    var marker = ''
    const listQueryOpts = {
      Delimiter: recursive ? '' : '/', // if recursive is false set delimiter to '/'
      MaxKeys: 1000,
      IncludeVersion: listOpts.IncludeVersion,
    }
    var objects = []
    var ended = false
    // Pull-based readable: each _read() either pushes a buffered object,
    // ends the stream, or fetches the next page from the server.
    var readStream = Stream.Readable({ objectMode: true })
    readStream._read = () => {
      // push one object per _read()
      if (objects.length) {
        readStream.push(objects.shift())
        return
      }
      if (ended) {
        return readStream.push(null)
      }
      // if there are no objects to push do query for the next batch of objects
      this.listObjectsQuery(bucketName, prefix, marker, listQueryOpts)
        .on('error', (e) => readStream.emit('error', e))
        .on('data', (result) => {
          // versioned listings paginate via versionIdMarker, plain listings
          // via nextMarker
          if (result.isTruncated) {
            marker = result.nextMarker || result.versionIdMarker
          } else {
            ended = true
          }
          objects = result.objects
          readStream._read()
        })
    }
    return readStream
  }

  // listObjectsV2Query - (List Objects V2) - List some or all (up to 1000) of the objects in a bucket.
  //
  // You can use the request parameters as selection criteria to return a subset of the objects in a bucket.
  // request parameters :-
  // * `bucketName` _string_: name of the bucket
  // * `prefix` _string_: Limits the response to keys that begin with the specified prefix.
  // * `continuation-token` _string_: Used to continue iterating over a set of objects.
  // * `delimiter` _string_: A delimiter is a character you use to group keys.
  // * `max-keys` _number_: Sets the maximum number of keys returned in the response body.
  // * `start-after` _string_: Specifies the key to start after when listing objects in a bucket.
- listObjectsV2Query(bucketName, prefix, continuationToken, delimiter, maxKeys, startAfter) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isString(prefix)) { - throw new TypeError('prefix should be of type "string"') - } - if (!isString(continuationToken)) { - throw new TypeError('continuationToken should be of type "string"') - } - if (!isString(delimiter)) { - throw new TypeError('delimiter should be of type "string"') - } - if (!isNumber(maxKeys)) { - throw new TypeError('maxKeys should be of type "number"') - } - if (!isString(startAfter)) { - throw new TypeError('startAfter should be of type "string"') - } - var queries = [] - - // Call for listing objects v2 API - queries.push(`list-type=2`) - queries.push(`encoding-type=url`) - - // escape every value in query string, except maxKeys - queries.push(`prefix=${uriEscape(prefix)}`) - queries.push(`delimiter=${uriEscape(delimiter)}`) - - if (continuationToken) { - continuationToken = uriEscape(continuationToken) - queries.push(`continuation-token=${continuationToken}`) - } - // Set start-after - if (startAfter) { - startAfter = uriEscape(startAfter) - queries.push(`start-after=${startAfter}`) - } - // no need to escape maxKeys - if (maxKeys) { - if (maxKeys >= 1000) { - maxKeys = 1000 - } - queries.push(`max-keys=${maxKeys}`) - } - queries.sort() - var query = '' - if (queries.length > 0) { - query = `${queries.join('&')}` - } - var method = 'GET' - var transformer = transformers.getListObjectsV2Transformer() - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return transformer.emit('error', e) - } - pipesetup(response, transformer) - }) - return transformer - } - - // List the objects in the bucket using S3 ListObjects V2 - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `prefix` _string_: the prefix of the objects that should be listed 
  //   (optional, default `''`)
  // * `recursive` _bool_: `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. (optional, default `false`)
  // * `startAfter` _string_: Specifies the key to start after when listing objects in a bucket. (optional, default `''`)
  //
  // __Return Value__
  // * `stream` _Stream_: stream emitting the objects in the bucket, the object is of the format:
  //   * `obj.name` _string_: name of the object
  //   * `obj.prefix` _string_: name of the object prefix
  //   * `obj.size` _number_: size of the object
  //   * `obj.etag` _string_: etag of the object
  //   * `obj.lastModified` _Date_: modified time stamp
  listObjectsV2(bucketName, prefix, recursive, startAfter) {
    if (prefix === undefined) {
      prefix = ''
    }
    if (recursive === undefined) {
      recursive = false
    }
    if (startAfter === undefined) {
      startAfter = ''
    }
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isValidPrefix(prefix)) {
      throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`)
    }
    if (!isString(prefix)) {
      throw new TypeError('prefix should be of type "string"')
    }
    if (!isBoolean(recursive)) {
      throw new TypeError('recursive should be of type "boolean"')
    }
    if (!isString(startAfter)) {
      throw new TypeError('startAfter should be of type "string"')
    }
    // if recursive is false set delimiter to '/'
    var delimiter = recursive ? '' : '/'
    var continuationToken = ''
    var objects = []
    var ended = false
    // Pull-based readable: each _read() either pushes a buffered object,
    // ends the stream, or fetches the next page from the server.
    var readStream = Stream.Readable({ objectMode: true })
    readStream._read = () => {
      // push one object per _read()
      if (objects.length) {
        readStream.push(objects.shift())
        return
      }
      if (ended) {
        return readStream.push(null)
      }
      // if there are no objects to push do query for the next batch of objects
      this.listObjectsV2Query(bucketName, prefix, continuationToken, delimiter, 1000, startAfter)
        .on('error', (e) => readStream.emit('error', e))
        .on('data', (result) => {
          // V2 paginates with a continuation token instead of a key marker.
          if (result.isTruncated) {
            continuationToken = result.nextContinuationToken
          } else {
            ended = true
          }
          objects = result.objects
          readStream._read()
        })
    }
    return readStream
  }

  // Stat information of the object.
  //
  // __Arguments__
  // * `bucketName` _string_: name of the bucket
  // * `objectName` _string_: name of the object
  // * `statOpts` _object_ : Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional).
- // * `callback(err, stat)` _function_: `err` is not `null` in case of error, `stat` contains the object information: - // * `stat.size` _number_: size of the object - // * `stat.etag` _string_: etag of the object - // * `stat.metaData` _string_: MetaData of the object - // * `stat.lastModified` _Date_: modified time stamp - // * `stat.versionId` _string_: version id of the object if available - statObject(bucketName, objectName, statOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - // backward compatibility - if (isFunction(statOpts)) { - cb = statOpts - statOpts = {} - } - - if (!isObject(statOpts)) { - throw new errors.InvalidArgumentError('statOpts should be of type "object"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - var query = querystring.stringify(statOpts) - var method = 'HEAD' - this.makeRequest({ method, bucketName, objectName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - // We drain the socket so that the connection gets closed. Note that this - // is not expensive as the socket will not have any data. - response.on('data', () => {}) - - const result = { - size: +response.headers['content-length'], - metaData: extractMetadata(response.headers), - lastModified: new Date(response.headers['last-modified']), - versionId: getVersionId(response.headers), - etag: sanitizeETag(response.headers.etag), - } - - cb(null, result) - }) - } - - // Remove the specified object. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `removeOpts` _object_: Version of the object in the form `{versionId:'my-uuid', governanceBypass:true|false, forceDelete:true|false}`. Default is `{}`. 
(optional) - // * `callback(err)` _function_: callback function is called with non `null` value in case of error - removeObject(bucketName, objectName, removeOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - // backward compatibility - if (isFunction(removeOpts)) { - cb = removeOpts - removeOpts = {} - } - - if (!isObject(removeOpts)) { - throw new errors.InvalidArgumentError('removeOpts should be of type "object"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - const method = 'DELETE' - const queryParams = {} - - if (removeOpts.versionId) { - queryParams.versionId = `${removeOpts.versionId}` - } - const headers = {} - if (removeOpts.governanceBypass) { - headers['X-Amz-Bypass-Governance-Retention'] = true - } - if (removeOpts.forceDelete) { - headers['x-minio-force-delete'] = true - } - - const query = querystring.stringify(queryParams) - - let requestOptions = { method, bucketName, objectName, headers } - if (query) { - requestOptions['query'] = query - } - - this.makeRequest(requestOptions, '', [200, 204], '', false, cb) - } - - // Remove all the objects residing in the objectsList. 
- // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectsList` _array_: array of objects of one of the following: - // * List of Object names as array of strings which are object keys: ['objectname1','objectname2'] - // * List of Object name and versionId as an object: [{name:"objectname",versionId:"my-version-id"}] - - removeObjects(bucketName, objectsList, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isArray(objectsList)) { - throw new errors.InvalidArgumentError('objectsList should be a list') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - const maxEntries = 1000 - const query = 'delete' - const method = 'POST' - - let result = objectsList.reduce( - (result, entry) => { - result.list.push(entry) - if (result.list.length === maxEntries) { - result.listOfList.push(result.list) - result.list = [] - } - return result - }, - { listOfList: [], list: [] }, - ) - - if (result.list.length > 0) { - result.listOfList.push(result.list) - } - - const encoder = new TextEncoder() - const batchResults = [] - - async.eachSeries( - result.listOfList, - (list, batchCb) => { - var objects = [] - list.forEach(function (value) { - if (isObject(value)) { - objects.push({ Key: value.name, VersionId: value.versionId }) - } else { - objects.push({ Key: value }) - } - }) - let deleteObjects = { Delete: { Quiet: true, Object: objects } } - const builder = new xml2js.Builder({ headless: true }) - let payload = builder.buildObject(deleteObjects) - payload = encoder.encode(payload) - const headers = {} - - headers['Content-MD5'] = toMd5(payload) - - let removeObjectsResult - this.makeRequest({ method, bucketName, query, headers }, payload, [200], '', true, (e, response) => { - if (e) { - return batchCb(e) - } - pipesetup(response, transformers.removeObjectsTransformer()) - .on('data', (data) => { - removeObjectsResult = 
data - }) - .on('error', (e) => { - return batchCb(e, null) - }) - .on('end', () => { - batchResults.push(removeObjectsResult) - return batchCb(null, removeObjectsResult) - }) - }) - }, - () => { - cb(null, _.flatten(batchResults)) - }, - ) - } - - // Get the policy on a bucket or an object prefix. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `callback(err, policy)` _function_: callback function - getBucketPolicy(bucketName, cb) { - // Validate arguments. - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - let method = 'GET' - let query = 'policy' - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let policy = Buffer.from('') - pipesetup(response, transformers.getConcater()) - .on('data', (data) => (policy = data)) - .on('error', cb) - .on('end', () => { - cb(null, policy.toString()) - }) - }) - } - - // Set the policy on a bucket or an object prefix. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `bucketPolicy` _string_: bucket policy (JSON stringify'ed) - // * `callback(err)` _function_: callback function - setBucketPolicy(bucketName, policy, cb) { - // Validate arguments. 
- if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) - } - if (!isString(policy)) { - throw new errors.InvalidBucketPolicyError(`Invalid bucket policy: ${policy} - must be "string"`) - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - let method = 'DELETE' - let query = 'policy' - - if (policy) { - method = 'PUT' - } - - this.makeRequest({ method, bucketName, query }, policy, [204], '', false, cb) - } - - // Generate a generic presigned URL which can be - // used for HTTP methods GET, PUT, HEAD and DELETE - // - // __Arguments__ - // * `method` _string_: name of the HTTP method - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `expiry` _number_: expiry in seconds (optional, default 7 days) - // * `reqParams` _object_: request parameters (optional) e.g {versionId:"10fa9946-3f64-4137-a58f-888065c0732e"} - // * `requestDate` _Date_: A date object, the url will be issued at (optional) - presignedUrl(method, bucketName, objectName, expires, reqParams, requestDate, cb) { - if (this.anonymous) { - throw new errors.AnonymousRequestError('Presigned ' + method + ' url cannot be generated for anonymous requests') - } - if (isFunction(requestDate)) { - cb = requestDate - requestDate = new Date() - } - if (isFunction(reqParams)) { - cb = reqParams - reqParams = {} - requestDate = new Date() - } - if (isFunction(expires)) { - cb = expires - reqParams = {} - expires = 24 * 60 * 60 * 7 // 7 days in seconds - requestDate = new Date() - } - if (!isNumber(expires)) { - throw new TypeError('expires should be of type "number"') - } - if (!isObject(reqParams)) { - throw new TypeError('reqParams should be of type "object"') - } - if (!isValidDate(requestDate)) { - throw new TypeError('requestDate should be of type "Date" and valid') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type 
"function"') - } - var query = querystring.stringify(reqParams) - this.getBucketRegion(bucketName, (e, region) => { - if (e) { - return cb(e) - } - // This statement is added to ensure that we send error through - // callback on presign failure. - var url - var reqOptions = this.getRequestOptions({ method, region, bucketName, objectName, query }) - - this.checkAndRefreshCreds() - try { - url = presignSignatureV4( - reqOptions, - this.accessKey, - this.secretKey, - this.sessionToken, - region, - requestDate, - expires, - ) - } catch (pe) { - return cb(pe) - } - cb(null, url) - }) - } - - // Generate a presigned URL for GET - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `expiry` _number_: expiry in seconds (optional, default 7 days) - // * `respHeaders` _object_: response headers to override or request params for query (optional) e.g {versionId:"10fa9946-3f64-4137-a58f-888065c0732e"} - // * `requestDate` _Date_: A date object, the url will be issued at (optional) - presignedGetObject(bucketName, objectName, expires, respHeaders, requestDate, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - - if (isFunction(respHeaders)) { - cb = respHeaders - respHeaders = {} - requestDate = new Date() - } - - var validRespHeaders = [ - 'response-content-type', - 'response-content-language', - 'response-expires', - 'response-cache-control', - 'response-content-disposition', - 'response-content-encoding', - ] - validRespHeaders.forEach((header) => { - if (respHeaders !== undefined && respHeaders[header] !== undefined && !isString(respHeaders[header])) { - throw new TypeError(`response header ${header} should be of type "string"`) - } - }) - return this.presignedUrl('GET', bucketName, objectName, 
expires, respHeaders, requestDate, cb) - } - - // Generate a presigned URL for PUT. Using this URL, the browser can upload to S3 only with the specified object name. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `expiry` _number_: expiry in seconds (optional, default 7 days) - presignedPutObject(bucketName, objectName, expires, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - return this.presignedUrl('PUT', bucketName, objectName, expires, cb) - } - - // return PostPolicy object - newPostPolicy() { - return new PostPolicy() - } - - // presignedPostPolicy can be used in situations where we want more control on the upload than what - // presignedPutObject() provides. i.e Using presignedPostPolicy we will be able to put policy restrictions - // on the object's `name` `bucket` `expiry` `Content-Type` `Content-Disposition` `metaData` - presignedPostPolicy(postPolicy, cb) { - if (this.anonymous) { - throw new errors.AnonymousRequestError('Presigned POST policy cannot be generated for anonymous requests') - } - if (!isObject(postPolicy)) { - throw new TypeError('postPolicy should be of type "object"') - } - if (!isFunction(cb)) { - throw new TypeError('cb should be of type "function"') - } - this.getBucketRegion(postPolicy.formData.bucket, (e, region) => { - if (e) { - return cb(e) - } - var date = new Date() - var dateStr = makeDateLong(date) - - this.checkAndRefreshCreds() - - if (!postPolicy.policy.expiration) { - // 'expiration' is mandatory field for S3. - // Set default expiration date of 7 days. 
- var expires = new Date() - expires.setSeconds(24 * 60 * 60 * 7) - postPolicy.setExpires(expires) - } - - postPolicy.policy.conditions.push(['eq', '$x-amz-date', dateStr]) - postPolicy.formData['x-amz-date'] = dateStr - - postPolicy.policy.conditions.push(['eq', '$x-amz-algorithm', 'AWS4-HMAC-SHA256']) - postPolicy.formData['x-amz-algorithm'] = 'AWS4-HMAC-SHA256' - - postPolicy.policy.conditions.push(['eq', '$x-amz-credential', this.accessKey + '/' + getScope(region, date)]) - postPolicy.formData['x-amz-credential'] = this.accessKey + '/' + getScope(region, date) - - if (this.sessionToken) { - postPolicy.policy.conditions.push(['eq', '$x-amz-security-token', this.sessionToken]) - postPolicy.formData['x-amz-security-token'] = this.sessionToken - } - - var policyBase64 = Buffer.from(JSON.stringify(postPolicy.policy)).toString('base64') - - postPolicy.formData.policy = policyBase64 - - var signature = postPresignSignatureV4(region, date, this.secretKey, policyBase64) - - postPolicy.formData['x-amz-signature'] = signature - var opts = {} - opts.region = region - opts.bucketName = postPolicy.formData.bucket - var reqOptions = this.getRequestOptions(opts) - var portStr = this.port == 80 || this.port === 443 ? '' : `:${this.port.toString()}` - var urlStr = `${reqOptions.protocol}//${reqOptions.host}${portStr}${reqOptions.path}` - cb(null, { postURL: urlStr, formData: postPolicy.formData }) - }) - } - - // Calls implemented below are related to multipart. - - // Initiate a new multipart upload. 
- initiateNewMultipartUpload(bucketName, objectName, metaData, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isObject(metaData)) { - throw new errors.InvalidObjectNameError('contentType should be of type "object"') - } - var method = 'POST' - let headers = Object.assign({}, metaData) - var query = 'uploads' - this.makeRequest({ method, bucketName, objectName, query, headers }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - var transformer = transformers.getInitiateMultipartTransformer() - pipesetup(response, transformer) - .on('error', (e) => cb(e)) - .on('data', (uploadId) => cb(null, uploadId)) - }) - } - - // Complete the multipart upload. After all the parts are uploaded issuing - // this call will aggregate the parts on the server into a single object. 
- completeMultipartUpload(bucketName, objectName, uploadId, etags, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isString(uploadId)) { - throw new TypeError('uploadId should be of type "string"') - } - if (!isObject(etags)) { - throw new TypeError('etags should be of type "Array"') - } - if (!isFunction(cb)) { - throw new TypeError('cb should be of type "function"') - } - - if (!uploadId) { - throw new errors.InvalidArgumentError('uploadId cannot be empty') - } - - var method = 'POST' - var query = `uploadId=${uriEscape(uploadId)}` - - var parts = [] - - etags.forEach((element) => { - parts.push({ - Part: [ - { - PartNumber: element.part, - }, - { - ETag: element.etag, - }, - ], - }) - }) - - var payloadObject = { CompleteMultipartUpload: parts } - var payload = Xml(payloadObject) - - this.makeRequest({ method, bucketName, objectName, query }, payload, [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - var transformer = transformers.getCompleteMultipartTransformer() - pipesetup(response, transformer) - .on('error', (e) => cb(e)) - .on('data', (result) => { - if (result.errCode) { - // Multipart Complete API returns an error XML after a 200 http status - cb(new errors.S3Error(result.errMessage)) - } else { - const completeMultipartResult = { - etag: result.etag, - versionId: getVersionId(response.headers), - } - cb(null, completeMultipartResult) - } - }) - }) - } - - // Get part-info of all parts of an incomplete upload specified by uploadId. 
- listParts(bucketName, objectName, uploadId, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isString(uploadId)) { - throw new TypeError('uploadId should be of type "string"') - } - if (!uploadId) { - throw new errors.InvalidArgumentError('uploadId cannot be empty') - } - var parts = [] - var listNext = (marker) => { - this.listPartsQuery(bucketName, objectName, uploadId, marker, (e, result) => { - if (e) { - cb(e) - return - } - parts = parts.concat(result.parts) - if (result.isTruncated) { - listNext(result.marker) - return - } - cb(null, parts) - }) - } - listNext(0) - } - - // Called by listParts to fetch a batch of part-info - listPartsQuery(bucketName, objectName, uploadId, marker, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isString(uploadId)) { - throw new TypeError('uploadId should be of type "string"') - } - if (!isNumber(marker)) { - throw new TypeError('marker should be of type "number"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - if (!uploadId) { - throw new errors.InvalidArgumentError('uploadId cannot be empty') - } - var query = '' - if (marker && marker !== 0) { - query += `part-number-marker=${marker}&` - } - query += `uploadId=${uriEscape(uploadId)}` - - var method = 'GET' - this.makeRequest({ method, bucketName, objectName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - var transformer = transformers.getListPartsTransformer() - pipesetup(response, transformer) - .on('error', (e) => cb(e)) - .on('data', (data) => cb(null, data)) - }) - } 
- - // Called by listIncompleteUploads to fetch a batch of incomplete uploads. - listIncompleteUploadsQuery(bucketName, prefix, keyMarker, uploadIdMarker, delimiter) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isString(prefix)) { - throw new TypeError('prefix should be of type "string"') - } - if (!isString(keyMarker)) { - throw new TypeError('keyMarker should be of type "string"') - } - if (!isString(uploadIdMarker)) { - throw new TypeError('uploadIdMarker should be of type "string"') - } - if (!isString(delimiter)) { - throw new TypeError('delimiter should be of type "string"') - } - var queries = [] - queries.push(`prefix=${uriEscape(prefix)}`) - queries.push(`delimiter=${uriEscape(delimiter)}`) - - if (keyMarker) { - keyMarker = uriEscape(keyMarker) - queries.push(`key-marker=${keyMarker}`) - } - if (uploadIdMarker) { - queries.push(`upload-id-marker=${uploadIdMarker}`) - } - - var maxUploads = 1000 - queries.push(`max-uploads=${maxUploads}`) - queries.sort() - queries.unshift('uploads') - var query = '' - if (queries.length > 0) { - query = `${queries.join('&')}` - } - var method = 'GET' - var transformer = transformers.getListMultipartTransformer() - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return transformer.emit('error', e) - } - pipesetup(response, transformer) - }) - return transformer - } - - // Find uploadId of an incomplete upload. 
- findUploadId(bucketName, objectName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isFunction(cb)) { - throw new TypeError('cb should be of type "function"') - } - var latestUpload - var listNext = (keyMarker, uploadIdMarker) => { - this.listIncompleteUploadsQuery(bucketName, objectName, keyMarker, uploadIdMarker, '') - .on('error', (e) => cb(e)) - .on('data', (result) => { - result.uploads.forEach((upload) => { - if (upload.key === objectName) { - if (!latestUpload || upload.initiated.getTime() > latestUpload.initiated.getTime()) { - latestUpload = upload - return - } - } - }) - if (result.isTruncated) { - listNext(result.nextKeyMarker, result.nextUploadIdMarker) - return - } - if (latestUpload) { - return cb(null, latestUpload.uploadId) - } - cb(null, undefined) - }) - } - listNext('', '') - } - - // Returns a function that can be used for uploading objects. - // If multipart === true, it returns function that is used to upload - // a part of the multipart. 
- getUploader(bucketName, objectName, metaData, multipart) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isBoolean(multipart)) { - throw new TypeError('multipart should be of type "boolean"') - } - if (!isObject(metaData)) { - throw new TypeError('metadata should be of type "object"') - } - - var validate = (stream, length, sha256sum, md5sum, cb) => { - if (!isReadableStream(stream)) { - throw new TypeError('stream should be of type "Stream"') - } - if (!isNumber(length)) { - throw new TypeError('length should be of type "number"') - } - if (!isString(sha256sum)) { - throw new TypeError('sha256sum should be of type "string"') - } - if (!isString(md5sum)) { - throw new TypeError('md5sum should be of type "string"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - } - var simpleUploader = (...args) => { - validate(...args) - var query = '' - upload(query, ...args) - } - var multipartUploader = (uploadId, partNumber, ...rest) => { - if (!isString(uploadId)) { - throw new TypeError('uploadId should be of type "string"') - } - if (!isNumber(partNumber)) { - throw new TypeError('partNumber should be of type "number"') - } - if (!uploadId) { - throw new errors.InvalidArgumentError('Empty uploadId') - } - if (!partNumber) { - throw new errors.InvalidArgumentError('partNumber cannot be 0') - } - validate(...rest) - var query = `partNumber=${partNumber}&uploadId=${uriEscape(uploadId)}` - upload(query, ...rest) - } - var upload = (query, stream, length, sha256sum, md5sum, cb) => { - var method = 'PUT' - let headers = { 'Content-Length': length } - - if (!multipart) { - headers = Object.assign({}, metaData, headers) - } - - if (!this.enableSHA256) { - headers['Content-MD5'] = md5sum - } - this.makeRequestStream( - { 
method, bucketName, objectName, query, headers }, - stream, - sha256sum, - [200], - '', - true, - (e, response) => { - if (e) { - return cb(e) - } - const result = { - etag: sanitizeETag(response.headers.etag), - versionId: getVersionId(response.headers), - } - // Ignore the 'data' event so that the stream closes. (nodejs stream requirement) - response.on('data', () => {}) - cb(null, result) - }, - ) - } - if (multipart) { - return multipartUploader - } - return simpleUploader - } - - // Remove all the notification configurations in the S3 provider - setBucketNotification(bucketName, config, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isObject(config)) { - throw new TypeError('notification config should be of type "Object"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - var method = 'PUT' - var query = 'notification' - var builder = new xml2js.Builder({ - rootName: 'NotificationConfiguration', - renderOpts: { pretty: false }, - headless: true, - }) - var payload = builder.buildObject(config) - this.makeRequest({ method, bucketName, query }, payload, [200], '', false, cb) - } - - removeAllBucketNotification(bucketName, cb) { - this.setBucketNotification(bucketName, new NotificationConfig(), cb) - } - - // Return the list of notification configurations stored - // in the S3 provider - getBucketNotification(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - var method = 'GET' - var query = 'notification' - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - var transformer = transformers.getBucketNotificationTransformer() - var bucketNotification - pipesetup(response, 
transformer) - .on('data', (result) => (bucketNotification = result)) - .on('error', (e) => cb(e)) - .on('end', () => cb(null, bucketNotification)) - }) - } - - // Listens for bucket notifications. Returns an EventEmitter. - listenBucketNotification(bucketName, prefix, suffix, events) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) - } - if (!isString(prefix)) { - throw new TypeError('prefix must be of type string') - } - if (!isString(suffix)) { - throw new TypeError('suffix must be of type string') - } - if (!isArray(events)) { - throw new TypeError('events must be of type Array') - } - let listener = new NotificationPoller(this, bucketName, prefix, suffix, events) - listener.start() - - return listener - } - - getBucketVersioning(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - var method = 'GET' - var query = 'versioning' - - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let versionConfig = Buffer.from('') - pipesetup(response, transformers.bucketVersioningTransformer()) - .on('data', (data) => { - versionConfig = data - }) - .on('error', cb) - .on('end', () => { - cb(null, versionConfig) - }) - }) - } - - setBucketVersioning(bucketName, versionConfig, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!Object.keys(versionConfig).length) { - throw new errors.InvalidArgumentError('versionConfig should be of type "object"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - var method = 'PUT' - var query = 'versioning' - var builder = new xml2js.Builder({ - rootName: 
'VersioningConfiguration', - renderOpts: { pretty: false }, - headless: true, - }) - var payload = builder.buildObject(versionConfig) - - this.makeRequest({ method, bucketName, query }, payload, [200], '', false, cb) - } - - /** To set Tags on a bucket or object based on the params - * __Arguments__ - * taggingParams _object_ Which contains the following properties - * bucketName _string_, - * objectName _string_ (Optional), - * tags _object_ of the form {'':'','':''} - * putOpts _object_ (Optional) e.g {versionId:"my-object-version-id"}, - * cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. - */ - setTagging(taggingParams) { - const { bucketName, objectName, tags, putOpts = {}, cb } = taggingParams - const method = 'PUT' - let query = 'tagging' - - if (putOpts && putOpts.versionId) { - query = `${query}&versionId=${putOpts.versionId}` - } - const tagsList = [] - for (const [key, value] of Object.entries(tags)) { - tagsList.push({ Key: key, Value: value }) - } - const taggingConfig = { - Tagging: { - TagSet: { - Tag: tagsList, - }, - }, - } - const encoder = new TextEncoder() - const headers = {} - const builder = new xml2js.Builder({ headless: true, renderOpts: { pretty: false } }) - let payload = builder.buildObject(taggingConfig) - payload = encoder.encode(payload) - headers['Content-MD5'] = toMd5(payload) - const requestOptions = { method, bucketName, query, headers } - - if (objectName) { - requestOptions['objectName'] = objectName - } - headers['Content-MD5'] = toMd5(payload) - - this.makeRequest(requestOptions, payload, [200], '', false, cb) - } - - /** Set Tags on a Bucket - * __Arguments__ - * bucketName _string_ - * tags _object_ of the form {'':'','':''} - * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. 
- */ - setBucketTagging(bucketName, tags, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isObject(tags)) { - throw new errors.InvalidArgumentError('tags should be of type "object"') - } - if (Object.keys(tags).length > 10) { - throw new errors.InvalidArgumentError('maximum tags allowed is 10"') - } - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - - return this.setTagging({ bucketName, tags, cb }) - } - - /** Set Tags on an Object - * __Arguments__ - * bucketName _string_ - * objectName _string_ - * * tags _object_ of the form {'':'','':''} - * putOpts _object_ (Optional) e.g {versionId:"my-object-version-id"}, - * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. - */ - setObjectTagging(bucketName, objectName, tags, putOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidBucketNameError('Invalid object name: ' + objectName) - } - - if (isFunction(putOpts)) { - cb = putOpts - putOpts = {} - } - - if (!isObject(tags)) { - throw new errors.InvalidArgumentError('tags should be of type "object"') - } - if (Object.keys(tags).length > 10) { - throw new errors.InvalidArgumentError('Maximum tags allowed is 10"') - } - - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - return this.setTagging({ bucketName, objectName, tags, putOpts, cb }) - } - - /** Remove Tags on an Bucket/Object based on params - * __Arguments__ - * bucketName _string_ - * objectName _string_ (optional) - * removeOpts _object_ (Optional) e.g {versionId:"my-object-version-id"}, - * `cb(error)` _function_ - callback function with `err` as the error argument. 
`err` is null if the operation is successful. - */ - removeTagging({ bucketName, objectName, removeOpts, cb }) { - const method = 'DELETE' - let query = 'tagging' - - if (removeOpts && Object.keys(removeOpts).length && removeOpts.versionId) { - query = `${query}&versionId=${removeOpts.versionId}` - } - const requestOptions = { method, bucketName, objectName, query } - - if (objectName) { - requestOptions['objectName'] = objectName - } - this.makeRequest(requestOptions, '', [200, 204], '', true, cb) - } - - /** Remove Tags associated with a bucket - * __Arguments__ - * bucketName _string_ - * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. - */ - removeBucketTagging(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - return this.removeTagging({ bucketName, cb }) - } - - /** Remove tags associated with an object - * __Arguments__ - * bucketName _string_ - * objectName _string_ - * removeOpts _object_ (Optional) e.g. {VersionID:"my-object-version-id"} - * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. 
- */ - removeObjectTagging(bucketName, objectName, removeOpts, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidBucketNameError('Invalid object name: ' + objectName) - } - if (isFunction(removeOpts)) { - cb = removeOpts - removeOpts = {} - } - if (removeOpts && Object.keys(removeOpts).length && !isObject(removeOpts)) { - throw new errors.InvalidArgumentError('removeOpts should be of type "object"') - } - - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - return this.removeTagging({ bucketName, objectName, removeOpts, cb }) - } - - /** Get Tags associated with a Bucket - * __Arguments__ - * bucketName _string_ - * `cb(error, tags)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. - */ - getBucketTagging(bucketName, cb) { - const method = 'GET' - const query = 'tagging' - const requestOptions = { method, bucketName, query } - - this.makeRequest(requestOptions, '', [200], '', true, (e, response) => { - var transformer = transformers.getTagsTransformer() - if (e) { - return cb(e) - } - let tagsList - pipesetup(response, transformer) - .on('data', (result) => (tagsList = result)) - .on('error', (e) => cb(e)) - .on('end', () => cb(null, tagsList)) - }) - } - - /** Get the tags associated with a bucket OR an object - * bucketName _string_ - * objectName _string_ (Optional) - * getOpts _object_ (Optional) e.g {versionId:"my-object-version-id"} - * `cb(error, tags)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. 
- */ - getObjectTagging(bucketName, objectName, getOpts = {}, cb = () => false) { - const method = 'GET' - let query = 'tagging' - - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidBucketNameError('Invalid object name: ' + objectName) - } - if (isFunction(getOpts)) { - cb = getOpts - getOpts = {} - } - if (!isObject(getOpts)) { - throw new errors.InvalidArgumentError('getOpts should be of type "object"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - if (getOpts && getOpts.versionId) { - query = `${query}&versionId=${getOpts.versionId}` - } - const requestOptions = { method, bucketName, query } - if (objectName) { - requestOptions['objectName'] = objectName - } - - this.makeRequest(requestOptions, '', [200], '', true, (e, response) => { - const transformer = transformers.getTagsTransformer() - if (e) { - return cb(e) - } - let tagsList - pipesetup(response, transformer) - .on('data', (result) => (tagsList = result)) - .on('error', (e) => cb(e)) - .on('end', () => cb(null, tagsList)) - }) - } - - /** - * Apply lifecycle configuration on a bucket. - * bucketName _string_ - * policyConfig _object_ a valid policy configuration object. - * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. 
- */ - applyBucketLifecycle(bucketName, policyConfig, cb) { - const method = 'PUT' - const query = 'lifecycle' - - const encoder = new TextEncoder() - const headers = {} - const builder = new xml2js.Builder({ - rootName: 'LifecycleConfiguration', - headless: true, - renderOpts: { pretty: false }, - }) - let payload = builder.buildObject(policyConfig) - payload = encoder.encode(payload) - const requestOptions = { method, bucketName, query, headers } - headers['Content-MD5'] = toMd5(payload) - - this.makeRequest(requestOptions, payload, [200], '', false, cb) - } - - /** Remove lifecycle configuration of a bucket. - * bucketName _string_ - * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. - */ - removeBucketLifecycle(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - const method = 'DELETE' - const query = 'lifecycle' - this.makeRequest({ method, bucketName, query }, '', [204], '', false, cb) - } - - /** Set/Override lifecycle configuration on a bucket. if the configuration is empty, it removes the configuration. - * bucketName _string_ - * lifeCycleConfig _object_ one of the following values: (null or '') to remove the lifecycle configuration. or a valid lifecycle configuration - * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. - */ - setBucketLifecycle(bucketName, lifeCycleConfig = null, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (_.isEmpty(lifeCycleConfig)) { - this.removeBucketLifecycle(bucketName, cb) - } else { - this.applyBucketLifecycle(bucketName, lifeCycleConfig, cb) - } - } - - /** Get lifecycle configuration on a bucket. 
- * bucketName _string_ - * `cb(config)` _function_ - callback function with lifecycle configuration as the error argument. - */ - getBucketLifecycle(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - const method = 'GET' - const query = 'lifecycle' - const requestOptions = { method, bucketName, query } - - this.makeRequest(requestOptions, '', [200], '', true, (e, response) => { - const transformer = transformers.lifecycleTransformer() - if (e) { - return cb(e) - } - let lifecycleConfig - pipesetup(response, transformer) - .on('data', (result) => (lifecycleConfig = result)) - .on('error', (e) => cb(e)) - .on('end', () => cb(null, lifecycleConfig)) - }) - } - - setObjectLockConfig(bucketName, lockConfigOpts = {}, cb) { - const retentionModes = [RETENTION_MODES.COMPLIANCE, RETENTION_MODES.GOVERNANCE] - const validUnits = [RETENTION_VALIDITY_UNITS.DAYS, RETENTION_VALIDITY_UNITS.YEARS] - - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - - if (lockConfigOpts.mode && !retentionModes.includes(lockConfigOpts.mode)) { - throw new TypeError(`lockConfigOpts.mode should be one of ${retentionModes}`) - } - if (lockConfigOpts.unit && !validUnits.includes(lockConfigOpts.unit)) { - throw new TypeError(`lockConfigOpts.unit should be one of ${validUnits}`) - } - if (lockConfigOpts.validity && !isNumber(lockConfigOpts.validity)) { - throw new TypeError(`lockConfigOpts.validity should be a number`) - } - - const method = 'PUT' - const query = 'object-lock' - - let config = { - ObjectLockEnabled: 'Enabled', - } - const configKeys = Object.keys(lockConfigOpts) - // Check if keys are present and all keys are present. 
- if (configKeys.length > 0) { - if (_.difference(configKeys, ['unit', 'mode', 'validity']).length !== 0) { - throw new TypeError( - `lockConfigOpts.mode,lockConfigOpts.unit,lockConfigOpts.validity all the properties should be specified.`, - ) - } else { - config.Rule = { - DefaultRetention: {}, - } - if (lockConfigOpts.mode) { - config.Rule.DefaultRetention.Mode = lockConfigOpts.mode - } - if (lockConfigOpts.unit === RETENTION_VALIDITY_UNITS.DAYS) { - config.Rule.DefaultRetention.Days = lockConfigOpts.validity - } else if (lockConfigOpts.unit === RETENTION_VALIDITY_UNITS.YEARS) { - config.Rule.DefaultRetention.Years = lockConfigOpts.validity - } - } - } - - const builder = new xml2js.Builder({ - rootName: 'ObjectLockConfiguration', - renderOpts: { pretty: false }, - headless: true, - }) - const payload = builder.buildObject(config) - - const headers = {} - headers['Content-MD5'] = toMd5(payload) - - this.makeRequest({ method, bucketName, query, headers }, payload, [200], '', false, cb) - } - - getObjectLockConfig(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - const method = 'GET' - const query = 'object-lock' - - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let objectLockConfig = Buffer.from('') - pipesetup(response, transformers.objectLockTransformer()) - .on('data', (data) => { - objectLockConfig = data - }) - .on('error', cb) - .on('end', () => { - cb(null, objectLockConfig) - }) - }) - } - - putObjectRetention(bucketName, objectName, retentionOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object 
name: ${objectName}`) - } - if (!isObject(retentionOpts)) { - throw new errors.InvalidArgumentError('retentionOpts should be of type "object"') - } else { - if (retentionOpts.governanceBypass && !isBoolean(retentionOpts.governanceBypass)) { - throw new errors.InvalidArgumentError('Invalid value for governanceBypass', retentionOpts.governanceBypass) - } - if ( - retentionOpts.mode && - ![RETENTION_MODES.COMPLIANCE, RETENTION_MODES.GOVERNANCE].includes(retentionOpts.mode) - ) { - throw new errors.InvalidArgumentError('Invalid object retention mode ', retentionOpts.mode) - } - if (retentionOpts.retainUntilDate && !isString(retentionOpts.retainUntilDate)) { - throw new errors.InvalidArgumentError('Invalid value for retainUntilDate', retentionOpts.retainUntilDate) - } - if (retentionOpts.versionId && !isString(retentionOpts.versionId)) { - throw new errors.InvalidArgumentError('Invalid value for versionId', retentionOpts.versionId) - } - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - const method = 'PUT' - let query = 'retention' - - const headers = {} - if (retentionOpts.governanceBypass) { - headers['X-Amz-Bypass-Governance-Retention'] = true - } - - const builder = new xml2js.Builder({ rootName: 'Retention', renderOpts: { pretty: false }, headless: true }) - const params = {} - - if (retentionOpts.mode) { - params.Mode = retentionOpts.mode - } - if (retentionOpts.retainUntilDate) { - params.RetainUntilDate = retentionOpts.retainUntilDate - } - if (retentionOpts.versionId) { - query += `&versionId=${retentionOpts.versionId}` - } - - let payload = builder.buildObject(params) - - headers['Content-MD5'] = toMd5(payload) - this.makeRequest({ method, bucketName, objectName, query, headers }, payload, [200, 204], '', false, cb) - } - - getObjectRetention(bucketName, objectName, getOpts, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if 
(!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isObject(getOpts)) { - throw new errors.InvalidArgumentError('callback should be of type "object"') - } else if (getOpts.versionId && !isString(getOpts.versionId)) { - throw new errors.InvalidArgumentError('VersionID should be of type "string"') - } - if (cb && !isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - const method = 'GET' - let query = 'retention' - if (getOpts.versionId) { - query += `&versionId=${getOpts.versionId}` - } - - this.makeRequest({ method, bucketName, objectName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let retentionConfig = Buffer.from('') - pipesetup(response, transformers.objectRetentionTransformer()) - .on('data', (data) => { - retentionConfig = data - }) - .on('error', cb) - .on('end', () => { - cb(null, retentionConfig) - }) - }) - } - - setBucketEncryption(bucketName, encryptionConfig, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - - if (isFunction(encryptionConfig)) { - cb = encryptionConfig - encryptionConfig = null - } - - if (!_.isEmpty(encryptionConfig) && encryptionConfig.Rule.length > 1) { - throw new errors.InvalidArgumentError('Invalid Rule length. 
Only one rule is allowed.: ' + encryptionConfig.Rule) - } - if (cb && !isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - let encryptionObj = encryptionConfig - if (_.isEmpty(encryptionConfig)) { - encryptionObj = { - // Default MinIO Server Supported Rule - Rule: [ - { - ApplyServerSideEncryptionByDefault: { - SSEAlgorithm: 'AES256', - }, - }, - ], - } - } - - let method = 'PUT' - let query = 'encryption' - let builder = new xml2js.Builder({ - rootName: 'ServerSideEncryptionConfiguration', - renderOpts: { pretty: false }, - headless: true, - }) - let payload = builder.buildObject(encryptionObj) - - const headers = {} - headers['Content-MD5'] = toMd5(payload) - - this.makeRequest({ method, bucketName, query, headers }, payload, [200], '', false, cb) - } - - getBucketEncryption(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - const method = 'GET' - const query = 'encryption' - - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let bucketEncConfig = Buffer.from('') - pipesetup(response, transformers.bucketEncryptionTransformer()) - .on('data', (data) => { - bucketEncConfig = data - }) - .on('error', cb) - .on('end', () => { - cb(null, bucketEncConfig) - }) - }) - } - removeBucketEncryption(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - const method = 'DELETE' - const query = 'encryption' - - this.makeRequest({ method, bucketName, query }, '', [204], '', false, cb) - } - - setBucketReplication(bucketName, replicationConfig = {}, cb) { - if 
(!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isObject(replicationConfig)) { - throw new errors.InvalidArgumentError('replicationConfig should be of type "object"') - } else { - if (_.isEmpty(replicationConfig.role)) { - throw new errors.InvalidArgumentError('Role cannot be empty') - } else if (replicationConfig.role && !isString(replicationConfig.role)) { - throw new errors.InvalidArgumentError('Invalid value for role', replicationConfig.role) - } - if (_.isEmpty(replicationConfig.rules)) { - throw new errors.InvalidArgumentError('Minimum one replication rule must be specified') - } - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - const method = 'PUT' - let query = 'replication' - const headers = {} - - const replicationParamsConfig = { - ReplicationConfiguration: { - Role: replicationConfig.role, - Rule: replicationConfig.rules, - }, - } - - const builder = new xml2js.Builder({ renderOpts: { pretty: false }, headless: true }) - - let payload = builder.buildObject(replicationParamsConfig) - - headers['Content-MD5'] = toMd5(payload) - - this.makeRequest({ method, bucketName, query, headers }, payload, [200], '', false, cb) - } - - getBucketReplication(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - const method = 'GET' - const query = 'replication' - - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let replicationConfig = Buffer.from('') - pipesetup(response, transformers.replicationConfigTransformer()) - .on('data', (data) => { - replicationConfig = data - }) - .on('error', cb) - .on('end', () => { - cb(null, replicationConfig) - }) - }) - } - - 
removeBucketReplication(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - const method = 'DELETE' - const query = 'replication' - this.makeRequest({ method, bucketName, query }, '', [200, 204], '', false, cb) - } - - getObjectLegalHold(bucketName, objectName, getOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - - if (isFunction(getOpts)) { - cb = getOpts - getOpts = {} - } - - if (!isObject(getOpts)) { - throw new TypeError('getOpts should be of type "Object"') - } else if (Object.keys(getOpts).length > 0 && getOpts.versionId && !isString(getOpts.versionId)) { - throw new TypeError('versionId should be of type string.:', getOpts.versionId) - } - - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - - const method = 'GET' - let query = 'legal-hold' - - if (getOpts.versionId) { - query += `&versionId=${getOpts.versionId}` - } - - this.makeRequest({ method, bucketName, objectName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let legalHoldConfig = Buffer.from('') - pipesetup(response, transformers.objectLegalHoldTransformer()) - .on('data', (data) => { - legalHoldConfig = data - }) - .on('error', cb) - .on('end', () => { - cb(null, legalHoldConfig) - }) - }) - } - - setObjectLegalHold(bucketName, objectName, setOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - - const defaultOpts = { - status: LEGAL_HOLD_STATUS.ENABLED, - } - if (isFunction(setOpts)) { - cb 
= setOpts - setOpts = defaultOpts - } - - if (!isObject(setOpts)) { - throw new TypeError('setOpts should be of type "Object"') - } else { - if (![LEGAL_HOLD_STATUS.ENABLED, LEGAL_HOLD_STATUS.DISABLED].includes(setOpts.status)) { - throw new TypeError('Invalid status: ' + setOpts.status) - } - if (setOpts.versionId && !setOpts.versionId.length) { - throw new TypeError('versionId should be of type string.:' + setOpts.versionId) - } - } - - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - - if (_.isEmpty(setOpts)) { - setOpts = { - defaultOpts, - } - } - - const method = 'PUT' - let query = 'legal-hold' - - if (setOpts.versionId) { - query += `&versionId=${setOpts.versionId}` - } - - let config = { - Status: setOpts.status, - } - - const builder = new xml2js.Builder({ rootName: 'LegalHold', renderOpts: { pretty: false }, headless: true }) - const payload = builder.buildObject(config) - const headers = {} - headers['Content-MD5'] = toMd5(payload) - - this.makeRequest({ method, bucketName, objectName, query, headers }, payload, [200], '', false, cb) - } - async setCredentialsProvider(credentialsProvider) { - if (!(credentialsProvider instanceof CredentialProvider)) { - throw new Error('Unable to get credentials. Expected instance of CredentialProvider') - } - this.credentialsProvider = credentialsProvider - await this.checkAndRefreshCreds() - } - - async checkAndRefreshCreds() { - if (this.credentialsProvider) { - return await this.fetchCredentials() - } - } - - async fetchCredentials() { - if (this.credentialsProvider) { - const credentialsConf = await this.credentialsProvider.getCredentials() - if (credentialsConf) { - this.accessKey = credentialsConf.getAccessKey() - this.secretKey = credentialsConf.getSecretKey() - this.sessionToken = credentialsConf.getSessionToken() - } else { - throw new Error('Unable to get credentials. 
Expected instance of BaseCredentialsProvider') - } - } else { - throw new Error('Unable to get credentials. Expected instance of BaseCredentialsProvider') - } - } - - /** - * Internal Method to abort a multipart upload request in case of any errors. - * @param bucketName __string__ Bucket Name - * @param objectName __string__ Object Name - * @param uploadId __string__ id of a multipart upload to cancel during compose object sequence. - * @param cb __function__ callback function - */ - abortMultipartUpload(bucketName, objectName, uploadId, cb) { - const method = 'DELETE' - let query = `uploadId=${uploadId}` - - const requestOptions = { method, bucketName, objectName: objectName, query } - this.makeRequest(requestOptions, '', [204], '', false, cb) - } - - /** - * Internal method to upload a part during compose object. - * @param partConfig __object__ contains the following. - * bucketName __string__ - * objectName __string__ - * uploadID __string__ - * partNumber __number__ - * headers __object__ - * @param cb called with null incase of error. - */ - uploadPartCopy(partConfig, cb) { - const { bucketName, objectName, uploadID, partNumber, headers } = partConfig - - const method = 'PUT' - let query = `uploadId=${uploadID}&partNumber=${partNumber}` - const requestOptions = { method, bucketName, objectName: objectName, query, headers } - return this.makeRequest(requestOptions, '', [200], '', true, (e, response) => { - let partCopyResult = Buffer.from('') - if (e) { - return cb(e) - } - pipesetup(response, transformers.uploadPartTransformer()) - .on('data', (data) => { - partCopyResult = data - }) - .on('error', cb) - .on('end', () => { - let uploadPartCopyRes = { - etag: sanitizeETag(partCopyResult.ETag), - key: objectName, - part: partNumber, - } - - cb(null, uploadPartCopyRes) - }) - }) - } - - composeObject(destObjConfig = {}, sourceObjList = [], cb) { - const me = this // many async flows. so store the ref. 
- const sourceFilesLength = sourceObjList.length - - if (!isArray(sourceObjList)) { - throw new errors.InvalidArgumentError('sourceConfig should an array of CopySourceOptions ') - } - if (!(destObjConfig instanceof CopyDestinationOptions)) { - throw new errors.InvalidArgumentError('destConfig should of type CopyDestinationOptions ') - } - - if (sourceFilesLength < 1 || sourceFilesLength > PART_CONSTRAINTS.MAX_PARTS_COUNT) { - throw new errors.InvalidArgumentError( - `"There must be as least one and up to ${PART_CONSTRAINTS.MAX_PARTS_COUNT} source objects.`, - ) - } - - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - for (let i = 0; i < sourceFilesLength; i++) { - if (!sourceObjList[i].validate()) { - return false - } - } - - if (!destObjConfig.validate()) { - return false - } - - const getStatOptions = (srcConfig) => { - let statOpts = {} - if (!_.isEmpty(srcConfig.VersionID)) { - statOpts = { - versionId: srcConfig.VersionID, - } - } - return statOpts - } - const srcObjectSizes = [] - let totalSize = 0 - let totalParts = 0 - - const sourceObjStats = sourceObjList.map((srcItem) => - me.statObject(srcItem.Bucket, srcItem.Object, getStatOptions(srcItem)), - ) - - return Promise.all(sourceObjStats) - .then((srcObjectInfos) => { - const validatedStats = srcObjectInfos.map((resItemStat, index) => { - const srcConfig = sourceObjList[index] - - let srcCopySize = resItemStat.size - // Check if a segment is specified, and if so, is the - // segment within object bounds? 
- if (srcConfig.MatchRange) { - // Since range is specified, - // 0 <= src.srcStart <= src.srcEnd - // so only invalid case to check is: - const srcStart = srcConfig.Start - const srcEnd = srcConfig.End - if (srcEnd >= srcCopySize || srcStart < 0) { - throw new errors.InvalidArgumentError( - `CopySrcOptions ${index} has invalid segment-to-copy [${srcStart}, ${srcEnd}] (size is ${srcCopySize})`, - ) - } - srcCopySize = srcEnd - srcStart + 1 - } - - // Only the last source may be less than `absMinPartSize` - if (srcCopySize < PART_CONSTRAINTS.ABS_MIN_PART_SIZE && index < sourceFilesLength - 1) { - throw new errors.InvalidArgumentError( - `CopySrcOptions ${index} is too small (${srcCopySize}) and it is not the last part.`, - ) - } - - // Is data to copy too large? - totalSize += srcCopySize - if (totalSize > PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE) { - throw new errors.InvalidArgumentError(`Cannot compose an object of size ${totalSize} (> 5TiB)`) - } - - // record source size - srcObjectSizes[index] = srcCopySize - - // calculate parts needed for current source - totalParts += partsRequired(srcCopySize) - // Do we need more parts than we are allowed? - if (totalParts > PART_CONSTRAINTS.MAX_PARTS_COUNT) { - throw new errors.InvalidArgumentError( - `Your proposed compose object requires more than ${PART_CONSTRAINTS.MAX_PARTS_COUNT} parts`, - ) - } - - return resItemStat - }) - - if ((totalParts === 1 && totalSize <= PART_CONSTRAINTS.MAX_PART_SIZE) || totalSize === 0) { - return this.copyObject(sourceObjList[0], destObjConfig, cb) // use copyObjectV2 - } - - // preserve etag to avoid modification of object while copying. 
- for (let i = 0; i < sourceFilesLength; i++) { - sourceObjList[i].MatchETag = validatedStats[i].etag - } - - const splitPartSizeList = validatedStats.map((resItemStat, idx) => { - const calSize = calculateEvenSplits(srcObjectSizes[idx], sourceObjList[idx]) - return calSize - }) - - function getUploadPartConfigList(uploadId) { - const uploadPartConfigList = [] - - splitPartSizeList.forEach((splitSize, splitIndex) => { - const { startIndex: startIdx, endIndex: endIdx, objInfo: objConfig } = splitSize - - let partIndex = splitIndex + 1 // part index starts from 1. - const totalUploads = Array.from(startIdx) - - const headers = sourceObjList[splitIndex].getHeaders() - - totalUploads.forEach((splitStart, upldCtrIdx) => { - let splitEnd = endIdx[upldCtrIdx] - - const sourceObj = `${objConfig.Bucket}/${objConfig.Object}` - headers['x-amz-copy-source'] = `${sourceObj}` - headers['x-amz-copy-source-range'] = `bytes=${splitStart}-${splitEnd}` - - const uploadPartConfig = { - bucketName: destObjConfig.Bucket, - objectName: destObjConfig.Object, - uploadID: uploadId, - partNumber: partIndex, - headers: headers, - sourceObj: sourceObj, - } - - uploadPartConfigList.push(uploadPartConfig) - }) - }) - - return uploadPartConfigList - } - - const performUploadParts = (uploadId) => { - const uploadList = getUploadPartConfigList(uploadId) - - async.map(uploadList, me.uploadPartCopy.bind(me), (err, res) => { - if (err) { - return this.abortMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId, cb) - } - const partsDone = res.map((partCopy) => ({ etag: partCopy.etag, part: partCopy.part })) - return me.completeMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId, partsDone, cb) - }) - } - - const newUploadHeaders = destObjConfig.getHeaders() - - me.initiateNewMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, newUploadHeaders, (err, uploadId) => { - if (err) { - return cb(err, null) - } - performUploadParts(uploadId) - }) - }) - .catch((error) => 
{ - cb(error, null) - }) - } - selectObjectContent(bucketName, objectName, selectOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!_.isEmpty(selectOpts)) { - if (!isString(selectOpts.expression)) { - throw new TypeError('sqlExpression should be of type "string"') - } - if (!_.isEmpty(selectOpts.inputSerialization)) { - if (!isObject(selectOpts.inputSerialization)) { - throw new TypeError('inputSerialization should be of type "object"') - } - } else { - throw new TypeError('inputSerialization is required') - } - if (!_.isEmpty(selectOpts.outputSerialization)) { - if (!isObject(selectOpts.outputSerialization)) { - throw new TypeError('outputSerialization should be of type "object"') - } - } else { - throw new TypeError('outputSerialization is required') - } - } else { - throw new TypeError('valid select configuration is required') - } - - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - const method = 'POST' - let query = `select` - query += '&select-type=2' - - const config = [ - { - Expression: selectOpts.expression, - }, - { - ExpressionType: selectOpts.expressionType || 'SQL', - }, - { - InputSerialization: [selectOpts.inputSerialization], - }, - { - OutputSerialization: [selectOpts.outputSerialization], - }, - ] - - // Optional - if (selectOpts.requestProgress) { - config.push({ RequestProgress: selectOpts.requestProgress }) - } - // Optional - if (selectOpts.scanRange) { - config.push({ ScanRange: selectOpts.scanRange }) - } - - const builder = new xml2js.Builder({ - rootName: 'SelectObjectContentRequest', - renderOpts: { pretty: false }, - headless: true, - }) - const payload = builder.buildObject(config) - - this.makeRequest({ method, bucketName, objectName, query }, payload, [200], '', true, (e, 
response) => { - if (e) { - return cb(e) - } - - let selectResult - pipesetup(response, transformers.selectObjectContentTransformer()) - .on('data', (data) => { - selectResult = parseSelectObjectContentResponse(data) - }) - .on('error', cb) - .on('end', () => { - cb(null, selectResult) - }) - }) - } - - get extensions() { - if (!this.clientExtensions) { - this.clientExtensions = new extensions(this) - } - return this.clientExtensions - } -} - -// Promisify various public-facing APIs on the Client module. -Client.prototype.makeBucket = promisify(Client.prototype.makeBucket) -Client.prototype.listBuckets = promisify(Client.prototype.listBuckets) -Client.prototype.bucketExists = promisify(Client.prototype.bucketExists) -Client.prototype.removeBucket = promisify(Client.prototype.removeBucket) - -Client.prototype.getObject = promisify(Client.prototype.getObject) -Client.prototype.getPartialObject = promisify(Client.prototype.getPartialObject) -Client.prototype.fGetObject = promisify(Client.prototype.fGetObject) -Client.prototype.putObject = promisify(Client.prototype.putObject) -Client.prototype.fPutObject = promisify(Client.prototype.fPutObject) -Client.prototype.copyObject = promisify(Client.prototype.copyObject) -Client.prototype.statObject = promisify(Client.prototype.statObject) -Client.prototype.removeObject = promisify(Client.prototype.removeObject) -Client.prototype.removeObjects = promisify(Client.prototype.removeObjects) - -Client.prototype.presignedUrl = promisify(Client.prototype.presignedUrl) -Client.prototype.presignedGetObject = promisify(Client.prototype.presignedGetObject) -Client.prototype.presignedPutObject = promisify(Client.prototype.presignedPutObject) -Client.prototype.presignedPostPolicy = promisify(Client.prototype.presignedPostPolicy) -Client.prototype.getBucketNotification = promisify(Client.prototype.getBucketNotification) -Client.prototype.setBucketNotification = promisify(Client.prototype.setBucketNotification) 
-Client.prototype.removeAllBucketNotification = promisify(Client.prototype.removeAllBucketNotification) -Client.prototype.getBucketPolicy = promisify(Client.prototype.getBucketPolicy) -Client.prototype.setBucketPolicy = promisify(Client.prototype.setBucketPolicy) -Client.prototype.removeIncompleteUpload = promisify(Client.prototype.removeIncompleteUpload) -Client.prototype.getBucketVersioning = promisify(Client.prototype.getBucketVersioning) -Client.prototype.setBucketVersioning = promisify(Client.prototype.setBucketVersioning) -Client.prototype.setBucketTagging = promisify(Client.prototype.setBucketTagging) -Client.prototype.removeBucketTagging = promisify(Client.prototype.removeBucketTagging) -Client.prototype.getBucketTagging = promisify(Client.prototype.getBucketTagging) -Client.prototype.setObjectTagging = promisify(Client.prototype.setObjectTagging) -Client.prototype.removeObjectTagging = promisify(Client.prototype.removeObjectTagging) -Client.prototype.getObjectTagging = promisify(Client.prototype.getObjectTagging) -Client.prototype.setBucketLifecycle = promisify(Client.prototype.setBucketLifecycle) -Client.prototype.getBucketLifecycle = promisify(Client.prototype.getBucketLifecycle) -Client.prototype.removeBucketLifecycle = promisify(Client.prototype.removeBucketLifecycle) -Client.prototype.setObjectLockConfig = promisify(Client.prototype.setObjectLockConfig) -Client.prototype.getObjectLockConfig = promisify(Client.prototype.getObjectLockConfig) -Client.prototype.putObjectRetention = promisify(Client.prototype.putObjectRetention) -Client.prototype.getObjectRetention = promisify(Client.prototype.getObjectRetention) -Client.prototype.setBucketEncryption = promisify(Client.prototype.setBucketEncryption) -Client.prototype.getBucketEncryption = promisify(Client.prototype.getBucketEncryption) -Client.prototype.removeBucketEncryption = promisify(Client.prototype.removeBucketEncryption) -Client.prototype.setBucketReplication = 
promisify(Client.prototype.setBucketReplication) -Client.prototype.getBucketReplication = promisify(Client.prototype.getBucketReplication) -Client.prototype.removeBucketReplication = promisify(Client.prototype.removeBucketReplication) -Client.prototype.setObjectLegalHold = promisify(Client.prototype.setObjectLegalHold) -Client.prototype.getObjectLegalHold = promisify(Client.prototype.getObjectLegalHold) -Client.prototype.composeObject = promisify(Client.prototype.composeObject) -Client.prototype.selectObjectContent = promisify(Client.prototype.selectObjectContent) - -export class CopyConditions { - constructor() { - this.modified = '' - this.unmodified = '' - this.matchETag = '' - this.matchETagExcept = '' - } - - setModified(date) { - if (!(date instanceof Date)) { - throw new TypeError('date must be of type Date') - } - - this.modified = date.toUTCString() - } - - setUnmodified(date) { - if (!(date instanceof Date)) { - throw new TypeError('date must be of type Date') - } - - this.unmodified = date.toUTCString() - } - - setMatchETag(etag) { - this.matchETag = etag - } - - setMatchETagExcept(etag) { - this.matchETagExcept = etag - } -} - -// Build PostPolicy object that can be signed by presignedPostPolicy -export class PostPolicy { - constructor() { - this.policy = { - conditions: [], - } - this.formData = {} - } - - // set expiration date - setExpires(date) { - if (!date) { - throw new errors.InvalidDateError('Invalid date : cannot be null') - } - this.policy.expiration = date.toISOString() - } - - // set object name - setKey(objectName) { - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name : ${objectName}`) - } - this.policy.conditions.push(['eq', '$key', objectName]) - this.formData.key = objectName - } - - // set object name prefix, i.e policy allows any keys with this prefix - setKeyStartsWith(prefix) { - if (!isValidPrefix(prefix)) { - throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) - } - 
this.policy.conditions.push(['starts-with', '$key', prefix]) - this.formData.key = prefix - } - - // set bucket name - setBucket(bucketName) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError(`Invalid bucket name : ${bucketName}`) - } - this.policy.conditions.push(['eq', '$bucket', bucketName]) - this.formData.bucket = bucketName - } - - // set Content-Type - setContentType(type) { - if (!type) { - throw new Error('content-type cannot be null') - } - this.policy.conditions.push(['eq', '$Content-Type', type]) - this.formData['Content-Type'] = type - } - - // set Content-Type prefix, i.e image/ allows any image - setContentTypeStartsWith(prefix) { - if (!prefix) { - throw new Error('content-type cannot be null') - } - this.policy.conditions.push(['starts-with', '$Content-Type', prefix]) - this.formData['Content-Type'] = prefix - } - - // set Content-Disposition - setContentDisposition(value) { - if (!value) { - throw new Error('content-disposition cannot be null') - } - this.policy.conditions.push(['eq', '$Content-Disposition', value]) - this.formData['Content-Disposition'] = value - } - - // set minimum/maximum length of what Content-Length can be. 
- setContentLengthRange(min, max) { - if (min > max) { - throw new Error('min cannot be more than max') - } - if (min < 0) { - throw new Error('min should be > 0') - } - if (max < 0) { - throw new Error('max should be > 0') - } - this.policy.conditions.push(['content-length-range', min, max]) - } - - // set user defined metadata - setUserMetaData(metaData) { - if (!isObject(metaData)) { - throw new TypeError('metadata should be of type "object"') - } - Object.entries(metaData).forEach(([key, value]) => { - const amzMetaDataKey = `x-amz-meta-${key}` - this.policy.conditions.push(['eq', `$${amzMetaDataKey}`, value]) - this.formData[amzMetaDataKey] = value - }) - } -} diff --git a/src/minio.ts b/src/minio.ts new file mode 100644 index 00000000..9ccad980 --- /dev/null +++ b/src/minio.ts @@ -0,0 +1,41 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { TypedClient2 } from './typed-client2.ts' + +export { AssumeRoleProvider } from './AssumeRoleProvider.ts' +export { CopyConditions } from './copyConditions.ts' +export { CredentialProvider } from './CredentialProvider.ts' +export { Credentials } from './Credentials.ts' +export { + CopyDestinationOptions, + CopySourceOptions, + DEFAULT_REGION, + ENCRYPTION_TYPES, + PART_CONSTRAINTS, +} from './helpers.ts' +export type { NotificationEvent, NotificationRecord } from './notification.ts' +export { + buildARN, + CloudFunctionConfig, + NotificationConfig, + NotificationPoller, + QueueConfig, + TopicConfig, +} from './notification.ts' +export { PostPolicy } from './postPolicy.ts' + +export class Client extends TypedClient2 {} diff --git a/src/notification.js b/src/notification.ts similarity index 51% rename from src/notification.js rename to src/notification.ts index 5fe14541..1b26db23 100644 --- a/src/notification.js +++ b/src/notification.ts @@ -16,51 +16,40 @@ import { EventEmitter } from 'node:events' -import { DEFAULT_REGION, pipesetup, uriEscape } from './helpers.js' -import * as transformers from './transformers.js' +import jsonLineParser from 'stream-json/jsonl/Parser.js' -// Notification config - array of target configs. -// Target configs can be -// 1. Topic (simple notification service) -// 2. Queue (simple queue service) -// 3. 
CloudFront (lambda function) -export class NotificationConfig { - add(target) { - let instance = '' - if (target instanceof TopicConfig) { - instance = 'TopicConfiguration' - } - if (target instanceof QueueConfig) { - instance = 'QueueConfiguration' - } - if (target instanceof CloudFunctionConfig) { - instance = 'CloudFunctionConfiguration' - } - if (!this[instance]) { - this[instance] = [] - } - this[instance].push(target) - } -} +import type { Client } from './client.ts' +import { DEFAULT_REGION, pipesetup, uriEscape } from './helpers.ts' + +// TODO: type this + +type Event = unknown // Base class for three supported configs. -class TargetConfig { - setId(id) { +export class TargetConfig { + private Filter?: { S3Key: { FilterRule: { Name: string; Value: string }[] } } + private Event?: Event[] + private Id: any + + setId(id: any) { this.Id = id } - addEvent(newevent) { + + addEvent(newevent: Event) { if (!this.Event) { this.Event = [] } this.Event.push(newevent) } - addFilterSuffix(suffix) { + + addFilterSuffix(suffix: string) { if (!this.Filter) { this.Filter = { S3Key: { FilterRule: [] } } } this.Filter.S3Key.FilterRule.push({ Name: 'suffix', Value: suffix }) } - addFilterPrefix(prefix) { + + addFilterPrefix(prefix: string) { if (!this.Filter) { this.Filter = { S3Key: { FilterRule: [] } } } @@ -70,7 +59,9 @@ class TargetConfig { // 1. Topic (simple notification service) export class TopicConfig extends TargetConfig { - constructor(arn) { + private Topic: string + + constructor(arn: string) { super() this.Topic = arn } @@ -78,7 +69,9 @@ export class TopicConfig extends TargetConfig { // 2. Queue (simple queue service) export class QueueConfig extends TargetConfig { - constructor(arn) { + private Queue: string + + constructor(arn: string) { super() this.Queue = arn } @@ -86,16 +79,44 @@ export class QueueConfig extends TargetConfig { // 3. 
CloudFront (lambda function) export class CloudFunctionConfig extends TargetConfig { - constructor(arn) { + private CloudFunction: string + + constructor(arn: string) { super() this.CloudFunction = arn } } -export const buildARN = (partition, service, region, accountId, resource) => { - return 'arn:' + partition + ':' + service + ':' + region + ':' + accountId + ':' + resource +// Notification config - array of target configs. +// Target configs can be +// 1. Topic (simple notification service) +// 2. Queue (simple queue service) +// 3. CloudFront (lambda function) +export class NotificationConfig { + private TopicConfiguration?: TargetConfig[] + private CloudFunctionConfiguration?: TargetConfig[] + private QueueConfiguration?: TargetConfig[] + + add(target: TargetConfig) { + let instance: TargetConfig[] | undefined + if (target instanceof TopicConfig) { + instance = this.TopicConfiguration ??= [] + } + if (target instanceof QueueConfig) { + instance = this.QueueConfiguration ??= [] + } + if (target instanceof CloudFunctionConfig) { + instance = this.CloudFunctionConfiguration ??= [] + } + if (instance) { + instance.push(target) + } + } } +export const buildARN = (partition: string, service: string, region: string, accountId: string, resource: string) => { + return 'arn:' + partition + ':' + service + ':' + region + ':' + accountId + ':' + resource +} export const ObjectCreatedAll = 's3:ObjectCreated:*' export const ObjectCreatedPut = 's3:ObjectCreated:Put' export const ObjectCreatedPost = 's3:ObjectCreated:Post' @@ -105,12 +126,39 @@ export const ObjectRemovedAll = 's3:ObjectRemoved:*' export const ObjectRemovedDelete = 's3:ObjectRemoved:Delete' export const ObjectRemovedDeleteMarkerCreated = 's3:ObjectRemoved:DeleteMarkerCreated' export const ObjectReducedRedundancyLostObject = 's3:ReducedRedundancyLostObject' +export type NotificationEvent = + | 's3:ObjectCreated:*' + | 's3:ObjectCreated:Put' + | 's3:ObjectCreated:Post' + | 's3:ObjectCreated:Copy' + | 
's3:ObjectCreated:CompleteMultipartUpload' + | 's3:ObjectRemoved:*' + | 's3:ObjectRemoved:Delete' + | 's3:ObjectRemoved:DeleteMarkerCreated' + | 's3:ReducedRedundancyLostObject' + | 's3:TestEvent' + | 's3:ObjectRestore:Post' + | 's3:ObjectRestore:Completed' + | 's3:Replication:OperationFailedReplication' + | 's3:Replication:OperationMissedThreshold' + | 's3:Replication:OperationReplicatedAfterThreshold' + | 's3:Replication:OperationNotTracked' + | string // put string at least so auto-complete could work +// TODO: type this +export type NotificationRecord = unknown // Poll for notifications, used in #listenBucketNotification. // Listening constitutes repeatedly requesting s3 whether or not any // changes have occurred. export class NotificationPoller extends EventEmitter { - constructor(client, bucketName, prefix, suffix, events) { + private client: Client + private bucketName: string + private prefix: string + private suffix: string + private events: NotificationEvent[] + private ending: boolean + + constructor(client: Client, bucketName: string, prefix: string, suffix: string, events: NotificationEvent[]) { super() this.client = client @@ -142,14 +190,14 @@ export class NotificationPoller extends EventEmitter { return } - let method = 'GET' - var queries = [] + const method = 'GET' + const queries = [] if (this.prefix) { - var prefix = uriEscape(this.prefix) + const prefix = uriEscape(this.prefix) queries.push(`prefix=${prefix}`) } if (this.suffix) { - var suffix = uriEscape(this.suffix) + const suffix = uriEscape(this.suffix) queries.push(`suffix=${suffix}`) } if (this.events) { @@ -157,44 +205,59 @@ export class NotificationPoller extends EventEmitter { } queries.sort() - var query = '' + let query = '' if (queries.length > 0) { query = `${queries.join('&')}` } const region = this.client.region || DEFAULT_REGION - this.client.makeRequest({ method, bucketName: this.bucketName, query }, '', [200], region, true, (e, response) => { - if (e) { - return 
this.emit('error', e) - } - - let transformer = transformers.getNotificationTransformer() - pipesetup(response, transformer) - .on('data', (result) => { - // Data is flushed periodically (every 5 seconds), so we should - // handle it after flushing from the JSON parser. - let records = result.Records - // If null (= no records), change to an empty array. - if (!records) { - records = [] - } - - // Iterate over the notifications and emit them individually. - records.forEach((record) => { - this.emit('notification', record) - }) - - // If we're done, stop. - if (this.ending) { - response.destroy() - } - }) - .on('error', (e) => this.emit('error', e)) - .on('end', () => { - // Do it again, if we haven't cancelled yet. - process.nextTick(() => { - this.checkForChanges() - }) - }) - }) + + this.client + .makeRequestAsync( + { + method, + bucketName: this.bucketName, + query, + }, + '', + [200], + region, + true, + ) + .then( + (response) => { + const asm = jsonLineParser.make() + + pipesetup(response, asm) + .on('data', (data) => { + // Data is flushed periodically (every 5 seconds), so we should + // handle it after flushing from the JSON parser. + let records = data.value.Records + // If null (= no records), change to an empty array. + if (!records) { + records = [] + } + + // Iterate over the notifications and emit them individually. + records.forEach((record: NotificationRecord) => { + this.emit('notification', record) + }) + + // If we're done, stop. + if (this.ending) { + response?.destroy() + } + }) + .on('error', (e) => this.emit('error', e)) + .on('end', () => { + // Do it again, if we haven't cancelled yet. 
+ process.nextTick(() => { + this.checkForChanges() + }) + }) + }, + (e) => { + return this.emit('error', e) + }, + ) } } diff --git a/src/object-uploader.js b/src/object-uploader.js deleted file mode 100644 index 2fdf6606..00000000 --- a/src/object-uploader.js +++ /dev/null @@ -1,289 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2016 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import * as Crypto from 'node:crypto' -import { Transform } from 'node:stream' - -import * as querystring from 'query-string' - -import { getVersionId, sanitizeETag } from './helpers.js' - -// We extend Transform because Writable does not implement ._flush(). -export class ObjectUploader extends Transform { - constructor(client, bucketName, objectName, partSize, metaData, callback) { - super() - this.emptyStream = true - this.client = client - this.bucketName = bucketName - this.objectName = objectName - // The size of each multipart, chunked by BlockStream2. - this.partSize = partSize - // This is the metadata for the object. - this.metaData = metaData - - // Call like: callback(error, {etag, versionId}). - this.callback = callback - - // We need to keep track of what number chunk/part we're on. This increments - // each time _write() is called. Starts with 1, not 0. - this.partNumber = 1 - - // A list of the previously uploaded chunks, for resuming a file upload. This - // will be null if we aren't resuming an upload. 
- this.oldParts = null - - // Keep track of the etags for aggregating the chunks together later. Each - // etag represents a single chunk of the file. - this.etags = [] - - // This is for the multipart upload request — if null, we're either not initiated - // yet or we're flushing in one packet. - this.id = null - - // Handle errors. - this.on('error', (err) => { - callback(err) - }) - } - - _transform(chunk, encoding, callback) { - this.emptyStream = false - let method = 'PUT' - let headers = { 'Content-Length': chunk.length } - let md5digest = '' - - // Calculate and set Content-MD5 header if SHA256 is not set. - // This will happen only when there is a secure connection to the s3 server. - if (!this.client.enableSHA256) { - md5digest = Crypto.createHash('md5').update(chunk).digest() - headers['Content-MD5'] = md5digest.toString('base64') - } - // We can flush the object in one packet if it fits in one chunk. This is true - // if the chunk size is smaller than the part size, signifying the end of the - // stream. - if (this.partNumber == 1 && chunk.length < this.partSize) { - // PUT the chunk in a single request — use an empty query. - let options = { - method, - // Set user metadata as this is not a multipart upload - headers: Object.assign({}, this.metaData, headers), - query: '', - bucketName: this.bucketName, - objectName: this.objectName, - } - - this.client.makeRequest(options, chunk, [200], '', true, (err, response) => { - if (err) { - return callback(err) - } - let result = { - etag: sanitizeETag(response.headers.etag), - versionId: getVersionId(response.headers), - } - // Ignore the 'data' event so that the stream closes. (nodejs stream requirement) - response.on('data', () => {}) - - // Give the etag back, we're done! - - process.nextTick(() => { - this.callback(null, result) - }) - - // Because we're sure the stream has ended, allow it to flush and end. 
- callback() - }) - - return - } - - // If we aren't flushing in one packet, we need to initiate the multipart upload, - // if it hasn't already been done. The write will be buffered until the upload has been - // initiated. - if (this.id === null) { - this.once('ready', () => { - this._transform(chunk, encoding, callback) - }) - - // Check for an incomplete previous upload. - this.client.findUploadId(this.bucketName, this.objectName, (err, id) => { - if (err) { - return this.emit('error', err) - } - - // If no upload ID exists, initiate a new one. - if (!id) { - this.client.initiateNewMultipartUpload(this.bucketName, this.objectName, this.metaData, (err, id) => { - if (err) { - return callback(err) - } - - this.id = id - - // We are now ready to accept new chunks — this will flush the buffered chunk. - this.emit('ready') - }) - - return - } - - this.id = id - - // Retrieve the pre-uploaded parts, if we need to resume the upload. - this.client.listParts(this.bucketName, this.objectName, id, (err, etags) => { - if (err) { - return this.emit('error', err) - } - - // It is possible for no parts to be already uploaded. - if (!etags) { - etags = [] - } - - // oldParts will become an object, allowing oldParts[partNumber].etag - this.oldParts = etags.reduce(function (prev, item) { - if (!prev[item.part]) { - prev[item.part] = item - } - return prev - }, {}) - - this.emit('ready') - }) - }) - - return - } - - // Continue uploading various parts if we have initiated multipart upload. - let partNumber = this.partNumber++ - - // Check to see if we've already uploaded this chunk. If the hash sums match, - // we can skip to the next chunk. - if (this.oldParts) { - let oldPart = this.oldParts[partNumber] - - // Calulcate the md5 hash, if it has not already been calculated. - if (!md5digest) { - md5digest = Crypto.createHash('md5').update(chunk).digest() - } - - if (oldPart && md5digest.toString('hex') === oldPart.etag) { - // The md5 matches, the chunk has already been uploaded. 
- this.etags.push({ part: partNumber, etag: oldPart.etag }) - - callback() - return - } - } - - // Write the chunk with an uploader. - let query = querystring.stringify({ - partNumber: partNumber, - uploadId: this.id, - }) - - let options = { - method, - query, - headers, - bucketName: this.bucketName, - objectName: this.objectName, - } - - this.client.makeRequest(options, chunk, [200], '', true, (err, response) => { - if (err) { - return callback(err) - } - - // In order to aggregate the parts together, we need to collect the etags. - let etag = response.headers.etag - if (etag) { - etag = etag.replace(/^"/, '').replace(/"$/, '') - } - - this.etags.push({ part: partNumber, etag }) - - // Ignore the 'data' event so that the stream closes. (nodejs stream requirement) - response.on('data', () => {}) - - // We're ready for the next chunk. - callback() - }) - } - - _flush(callback) { - if (this.emptyStream) { - let method = 'PUT' - let headers = Object.assign({}, this.metaData, { 'Content-Length': 0 }) - let options = { - method, - headers, - query: '', - bucketName: this.bucketName, - objectName: this.objectName, - } - - this.client.makeRequest(options, '', [200], '', true, (err, response) => { - if (err) { - return callback(err) - } - - let result = { - etag: sanitizeETag(response.headers.etag), - versionId: getVersionId(response.headers), - } - - // Ignore the 'data' event so that the stream closes. (nodejs stream requirement) - response.on('data', () => {}) - - // Give the etag back, we're done! - process.nextTick(() => { - this.callback(null, result) - }) - - // Because we're sure the stream has ended, allow it to flush and end. - callback() - }) - - return - } - // If it has been uploaded in a single packet, we don't have to do anything. - if (this.id === null) { - return - } - - // This is called when all of the chunks uploaded successfully, thus - // completing the multipart upload. 
- this.client.completeMultipartUpload(this.bucketName, this.objectName, this.id, this.etags, (err, etag) => { - if (err) { - return callback(err) - } - - // Call our callback on the next tick to allow the streams infrastructure - // to finish what its doing before we continue. - process.nextTick(() => { - this.callback(null, etag) - }) - - callback() - }) - } -} - -// deprecated default export, please use named exports. -// keep for backward compatibility. -// eslint-disable-next-line import/no-default-export -export default ObjectUploader diff --git a/src/postPolicy.ts b/src/postPolicy.ts new file mode 100644 index 00000000..a1092e50 --- /dev/null +++ b/src/postPolicy.ts @@ -0,0 +1,104 @@ +// Build PostPolicy object that can be signed by presignedPostPolicy +import * as errors from './errors.ts' +import type { MetaData } from './helpers.ts' +import { isObject, isValidBucketName, isValidObjectName, isValidPrefix } from './helpers.ts' + +export class PostPolicy { + public policy: { conditions: (string | number)[][]; expiration?: string } + public formData: Record + + constructor() { + this.policy = { + conditions: [], + } + this.formData = {} + } + + // set expiration date + setExpires(date: Date) { + if (!date) { + throw new errors.InvalidDateError('Invalid date: cannot be null') + } + this.policy.expiration = date.toISOString() + } + + // set object name + setKey(objectName: string) { + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name : ${objectName}`) + } + this.policy.conditions.push(['eq', '$key', objectName]) + this.formData.key = objectName + } + + // set object name prefix, i.e policy allows any keys with this prefix + setKeyStartsWith(prefix: string) { + if (!isValidPrefix(prefix)) { + throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) + } + this.policy.conditions.push(['starts-with', '$key', prefix]) + this.formData.key = prefix + } + + // set bucket name + setBucket(bucketName: string) { + 
if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name : ${bucketName}`) + } + this.policy.conditions.push(['eq', '$bucket', bucketName]) + this.formData.bucket = bucketName + } + + // set Content-Type + setContentType(type: string) { + if (!type) { + throw new Error('content-type cannot be null') + } + this.policy.conditions.push(['eq', '$Content-Type', type]) + this.formData['Content-Type'] = type + } + + // set Content-Type prefix, i.e image/ allows any image + setContentTypeStartsWith(prefix: string) { + if (!prefix) { + throw new Error('content-type cannot be null') + } + this.policy.conditions.push(['starts-with', '$Content-Type', prefix]) + this.formData['Content-Type'] = prefix + } + + // set Content-Disposition + setContentDisposition(value: string) { + if (!value) { + throw new Error('content-disposition cannot be null') + } + this.policy.conditions.push(['eq', '$Content-Disposition', value]) + this.formData['Content-Disposition'] = value + } + + // set minimum/maximum length of what Content-Length can be. + setContentLengthRange(min: number, max: number) { + if (min > max) { + throw new Error('min cannot be more than max') + } + if (min < 0) { + throw new Error('min should be > 0') + } + if (max < 0) { + throw new Error('max should be > 0') + } + this.policy.conditions.push(['content-length-range', min, max]) + } + + // set user defined metadata + setUserMetaData(metaData: MetaData) { + if (!isObject(metaData)) { + throw new TypeError('metadata should be of type "object"') + } + Object.entries(metaData).forEach(([key, value]) => { + const amzMetaDataKey = `x-amz-meta-${key}` + this.policy.conditions.push(['eq', `$${amzMetaDataKey}`, value]) + this.formData[amzMetaDataKey] = value.toString() + }) + } +} diff --git a/src/qs.ts b/src/qs.ts new file mode 100644 index 00000000..56c17504 --- /dev/null +++ b/src/qs.ts @@ -0,0 +1,7 @@ +import queryString from 'query-string' + +// rfc 3986 encoding. 
+// `URLSearchParams` and `node:querystring` won't work +export function qs(q: Record): string { + return queryString.stringify(q) +} diff --git a/src/request.ts b/src/request.ts new file mode 100644 index 00000000..6846f6fc --- /dev/null +++ b/src/request.ts @@ -0,0 +1,29 @@ +import * as http from 'node:http' +import * as https from 'node:https' +import type * as stream from 'node:stream' + +export async function request( + opt: https.RequestOptions, + isHttp: boolean, + body: Buffer | string | stream.Readable | undefined = undefined, +): Promise { + const transport = isHttp ? http : https + + return new Promise((resolve, reject) => { + const requestObj = transport.request(opt, (resp) => { + resolve(resp) + }) + + requestObj.on('error', (e: unknown) => { + reject(e) + }) + + if (body) { + if (!Buffer.isBuffer(body) && typeof body !== 'string') { + body.on('error', reject) + } + + requestObj.end(body) + } + }) +} diff --git a/src/response.ts b/src/response.ts new file mode 100644 index 00000000..bb3a0b15 --- /dev/null +++ b/src/response.ts @@ -0,0 +1,26 @@ +import type http from 'node:http' +import type stream from 'node:stream' + +export async function readAsBuffer(res: stream.Readable): Promise { + return new Promise((resolve, reject) => { + const body: Buffer[] = [] + res + .on('data', (chunk: Buffer) => body.push(chunk)) + .on('error', (e) => reject(e)) + .on('end', () => resolve(Buffer.concat(body))) + }) +} + +export async function readAsString(res: http.IncomingMessage): Promise { + const body = await readAsBuffer(res) + return body.toString() +} + +export async function drainResponse(res: stream.Readable): Promise { + return new Promise((resolve, reject) => { + res + .on('data', () => {}) + .on('error', (e) => reject(e)) + .on('end', () => resolve()) + }) +} diff --git a/src/s3-endpoints.js b/src/s3-endpoints.ts similarity index 87% rename from src/s3-endpoints.js rename to src/s3-endpoints.ts index aa6a7921..a3f20e68 100644 --- a/src/s3-endpoints.js +++ 
b/src/s3-endpoints.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { isString } from './helpers.js' +import { isString } from './helpers.ts' // List of currently supported endpoints. const awsS3Endpoint = { @@ -35,16 +35,20 @@ const awsS3Endpoint = { 'ap-east-1': 's3.ap-east-1.amazonaws.com', 'eu-north-1': 's3.eu-north-1.amazonaws.com', // Add new endpoints here. -} +} as const + +export type Region = keyof typeof awsS3Endpoint | string // getS3Endpoint get relevant endpoint for the region. -export function getS3Endpoint(region) { +export function getS3Endpoint(region: string): string { if (!isString(region)) { throw new TypeError(`Invalid region: ${region}`) } - var endpoint = awsS3Endpoint[region] + + const endpoint = (awsS3Endpoint as Record)[region] if (endpoint) { return endpoint } + return 's3.amazonaws.com' } diff --git a/src/signing.js b/src/signing.ts similarity index 84% rename from src/signing.js rename to src/signing.ts index 247206f6..758cec10 100644 --- a/src/signing.js +++ b/src/signing.ts @@ -16,10 +16,9 @@ import * as Crypto from 'node:crypto' -import _ from 'lodash' - import * as errors from './errors.ts' -import { getScope, isArray, isNumber, isObject, isString, makeDateLong, makeDateShort, uriEscape } from './helpers.js' +import { getScope, isArray, isNumber, isObject, isString, makeDateLong, makeDateShort, uriEscape } from './helpers.ts' +import type { ICanonicalRequest, IRequest, RequestHeaders } from './type.ts' const signV4Algorithm = 'AWS4-HMAC-SHA256' @@ -33,7 +32,13 @@ const signV4Algorithm = 'AWS4-HMAC-SHA256' // \n // // -function getCanonicalRequest(method, path, headers, signedHeaders, hashedPayload) { +function getCanonicalRequest( + method: string, + path: string, + headers: RequestHeaders, + signedHeaders: string[], + hashedPayload: string, +): ICanonicalRequest { if (!isString(method)) { throw new TypeError('method should be of type "string"') } @@ -49,12 +54,13 @@ function getCanonicalRequest(method, path, 
headers, signedHeaders, hashedPayload if (!isString(hashedPayload)) { throw new TypeError('hashedPayload should be of type "string"') } + const headersArray = signedHeaders.reduce((acc, i) => { // Trim spaces from the value (required by V4 spec) const val = `${headers[i]}`.replace(/ +/g, ' ') acc.push(`${i.toLowerCase()}:${val}`) return acc - }, []) + }, [] as string[]) const requestResource = path.split('?')[0] let requestQuery = path.split('?')[1] @@ -66,7 +72,7 @@ function getCanonicalRequest(method, path, headers, signedHeaders, hashedPayload requestQuery = requestQuery .split('&') .sort() - .map((element) => (element.indexOf('=') === -1 ? element + '=' : element)) + .map((element) => (!element.includes('=') ? element + '=' : element)) .join('&') } @@ -81,7 +87,7 @@ function getCanonicalRequest(method, path, headers, signedHeaders, hashedPayload } // generate a credential string -function getCredential(accessKey, region, requestDate, serviceName = 's3') { +function getCredential(accessKey: string, region: string, requestDate?: Date, serviceName = 's3') { if (!isString(accessKey)) { throw new TypeError('accessKey should be of type "string"') } @@ -95,7 +101,7 @@ function getCredential(accessKey, region, requestDate, serviceName = 's3') { } // Returns signed headers array - alphabetically sorted -function getSignedHeaders(headers) { +function getSignedHeaders(headers: RequestHeaders): string[] { if (!isObject(headers)) { throw new TypeError('request should be of type "object"') } @@ -127,13 +133,13 @@ function getSignedHeaders(headers) { // Is skipped for obvious reasons const ignoredHeaders = ['authorization', 'content-length', 'content-type', 'user-agent'] - return _.map(headers, (v, header) => header) - .filter((header) => ignoredHeaders.indexOf(header) === -1) + return Object.keys(headers) + .filter((header) => !ignoredHeaders.includes(header)) .sort() } // returns the key used for calculating signature -function getSigningKey(date, region, secretKey, 
serviceName = 's3') { +function getSigningKey(date: Date, region: string, secretKey: string, serviceName = 's3') { if (!isObject(date)) { throw new TypeError('date should be of type "object"') } @@ -144,7 +150,7 @@ function getSigningKey(date, region, secretKey, serviceName = 's3') { throw new TypeError('secretKey should be of type "string"') } const dateLine = makeDateShort(date) - let hmac1 = Crypto.createHmac('sha256', 'AWS4' + secretKey) + const hmac1 = Crypto.createHmac('sha256', 'AWS4' + secretKey) .update(dateLine) .digest(), hmac2 = Crypto.createHmac('sha256', hmac1).update(region).digest(), @@ -153,7 +159,7 @@ function getSigningKey(date, region, secretKey, serviceName = 's3') { } // returns the string that needs to be signed -function getStringToSign(canonicalRequest, requestDate, region, serviceName = 's3') { +function getStringToSign(canonicalRequest: ICanonicalRequest, requestDate: Date, region: string, serviceName = 's3') { if (!isString(canonicalRequest)) { throw new TypeError('canonicalRequest should be of type "string"') } @@ -165,17 +171,13 @@ function getStringToSign(canonicalRequest, requestDate, region, serviceName = 's } const hash = Crypto.createHash('sha256').update(canonicalRequest).digest('hex') const scope = getScope(region, requestDate, serviceName) - const stringToSign = [] - stringToSign.push(signV4Algorithm) - stringToSign.push(makeDateLong(requestDate)) - stringToSign.push(scope) - stringToSign.push(hash) - const signString = stringToSign.join('\n') - return signString + const stringToSign = [signV4Algorithm, makeDateLong(requestDate), scope, hash] + + return stringToSign.join('\n') } // calculate the signature of the POST policy -export function postPresignSignatureV4(region, date, secretKey, policyBase64) { +export function postPresignSignatureV4(region: string, date: Date, secretKey: string, policyBase64: string): string { if (!isString(region)) { throw new TypeError('region should be of type "string"') } @@ -193,7 +195,14 @@ 
export function postPresignSignatureV4(region, date, secretKey, policyBase64) { } // Returns the authorization header -export function signV4(request, accessKey, secretKey, region, requestDate, serviceName = 's3') { +export function signV4( + request: IRequest, + accessKey: string, + secretKey: string, + region: string, + requestDate: Date, + serviceName = 's3', +) { if (!isObject(request)) { throw new TypeError('request should be of type "object"') } @@ -214,7 +223,7 @@ export function signV4(request, accessKey, secretKey, region, requestDate, servi throw new errors.SecretKeyRequiredError('secretKey is required for signing') } - const sha256sum = request.headers['x-amz-content-sha256'] + const sha256sum = request.headers['x-amz-content-sha256'] as string const signedHeaders = getSignedHeaders(request.headers) const canonicalRequest = getCanonicalRequest(request.method, request.path, request.headers, signedHeaders, sha256sum) @@ -229,11 +238,27 @@ export function signV4(request, accessKey, secretKey, region, requestDate, servi .toLowerCase()}, Signature=${signature}` } -export function signV4ByServiceName(request, accessKey, secretKey, region, requestDate, serviceName = 's3') { +export function signV4ByServiceName( + request: IRequest, + accessKey: string, + secretKey: string, + region: string, + requestDate: Date, + serviceName = 's3', +): string { return signV4(request, accessKey, secretKey, region, requestDate, serviceName) } + // returns a presigned URL string -export function presignSignatureV4(request, accessKey, secretKey, sessionToken, region, requestDate, expires) { +export function presignSignatureV4( + request: IRequest, + accessKey: string, + secretKey: string, + sessionToken: string, + region: string, + requestDate: Date, + expires: unknown, +) { if (!isObject(request)) { throw new TypeError('request should be of type "object"') } @@ -294,6 +319,5 @@ export function presignSignatureV4(request, accessKey, secretKey, sessionToken, const stringToSign = 
getStringToSign(canonicalRequest, requestDate, region) const signingKey = getSigningKey(requestDate, region, secretKey) const signature = Crypto.createHmac('sha256', signingKey).update(stringToSign).digest('hex').toLowerCase() - const presignedUrl = request.protocol + '//' + request.headers.host + path + `&X-Amz-Signature=${signature}` - return presignedUrl + return request.protocol + '//' + request.headers.host + path + `&X-Amz-Signature=${signature}` } diff --git a/src/streamify.ts b/src/streamify.ts new file mode 100644 index 00000000..02ab2d65 --- /dev/null +++ b/src/streamify.ts @@ -0,0 +1,30 @@ +import * as stream from 'node:stream' + +const Generator = async function* () {}.constructor + +export class StreamGenerators extends stream.Readable { + private _g: AsyncGenerator + + constructor(g: AsyncGeneratorFunction) { + if (!(g instanceof Generator)) { + throw new TypeError('First argument must be a ES6 Generator') + } + + super({ objectMode: true }) + this._g = g() + } + + async _read() { + try { + const { done, value } = await this._g.next() + + if (done) { + this.push(null) + } else { + this.push(value) + } + } catch (e) { + this.emit('error', e) + } + } +} diff --git a/src/transformers.js b/src/transformers.js deleted file mode 100644 index 4cde9a2a..00000000 --- a/src/transformers.js +++ /dev/null @@ -1,263 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015, 2016 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import * as Crypto from 'node:crypto' - -import JSONParser from 'json-stream' -import _ from 'lodash' -import Through2 from 'through2' - -import * as errors from './errors.ts' -import { isFunction } from './helpers.js' -import * as xmlParsers from './xml-parsers.js' - -// getConcater returns a stream that concatenates the input and emits -// the concatenated output when 'end' has reached. If an optional -// parser function is passed upon reaching the 'end' of the stream, -// `parser(concatenated_data)` will be emitted. -export function getConcater(parser, emitError) { - var objectMode = false - var bufs = [] - - if (parser && !isFunction(parser)) { - throw new TypeError('parser should be of type "function"') - } - - if (parser) { - objectMode = true - } - - return Through2( - { objectMode }, - function (chunk, enc, cb) { - bufs.push(chunk) - cb() - }, - function (cb) { - if (emitError) { - cb(parser(Buffer.concat(bufs).toString())) - // cb(e) would mean we have to emit 'end' by explicitly calling this.push(null) - this.push(null) - return - } - if (bufs.length) { - if (parser) { - this.push(parser(Buffer.concat(bufs).toString())) - } else { - this.push(Buffer.concat(bufs)) - } - } - cb() - }, - ) -} - -// Generates an Error object depending on http statusCode and XML body -export function getErrorTransformer(response) { - var statusCode = response.statusCode - var code, message - if (statusCode === 301) { - code = 'MovedPermanently' - message = 'Moved Permanently' - } else if (statusCode === 307) { - code = 'TemporaryRedirect' - message = 'Are you using the correct endpoint URL?' 
- } else if (statusCode === 403) { - code = 'AccessDenied' - message = 'Valid and authorized credentials required' - } else if (statusCode === 404) { - code = 'NotFound' - message = 'Not Found' - } else if (statusCode === 405) { - code = 'MethodNotAllowed' - message = 'Method Not Allowed' - } else if (statusCode === 501) { - code = 'MethodNotAllowed' - message = 'Method Not Allowed' - } else { - code = 'UnknownError' - message = `${statusCode}` - } - - var headerInfo = {} - // A value created by S3 compatible server that uniquely identifies - // the request. - headerInfo.amzRequestid = response.headersSent ? response.getHeader('x-amz-request-id') : null - // A special token that helps troubleshoot API replies and issues. - headerInfo.amzId2 = response.headersSent ? response.getHeader('x-amz-id-2') : null - // Region where the bucket is located. This header is returned only - // in HEAD bucket and ListObjects response. - headerInfo.amzBucketRegion = response.headersSent ? response.getHeader('x-amz-bucket-region') : null - - return getConcater((xmlString) => { - let getError = () => { - // Message should be instantiated for each S3Errors. - var e = new errors.S3Error(message) - // S3 Error code. 
- e.code = code - _.each(headerInfo, (value, key) => { - e[key] = value - }) - return e - } - if (!xmlString) { - return getError() - } - let e - try { - e = xmlParsers.parseError(xmlString, headerInfo) - } catch (ex) { - return getError() - } - return e - }, true) -} - -// A through stream that calculates md5sum and sha256sum -export function getHashSummer(enableSHA256) { - var md5 = Crypto.createHash('md5') - var sha256 = Crypto.createHash('sha256') - - return Through2.obj( - function (chunk, enc, cb) { - if (enableSHA256) { - sha256.update(chunk) - } else { - md5.update(chunk) - } - cb() - }, - function (cb) { - var md5sum = '' - var sha256sum = '' - if (enableSHA256) { - sha256sum = sha256.digest('hex') - } else { - md5sum = md5.digest('base64') - } - var hashData = { md5sum, sha256sum } - this.push(hashData) - this.push(null) - cb() - }, - ) -} - -// Following functions return a stream object that parses XML -// and emits suitable Javascript objects. - -// Parses CopyObject response. -export function getCopyObjectTransformer() { - return getConcater(xmlParsers.parseCopyObject) -} - -// Parses listBuckets response. -export function getListBucketTransformer() { - return getConcater(xmlParsers.parseListBucket) -} - -// Parses listMultipartUploads response. -export function getListMultipartTransformer() { - return getConcater(xmlParsers.parseListMultipart) -} - -// Parses listParts response. -export function getListPartsTransformer() { - return getConcater(xmlParsers.parseListParts) -} - -// Parses initMultipartUpload response. -export function getInitiateMultipartTransformer() { - return getConcater(xmlParsers.parseInitiateMultipart) -} - -// Parses listObjects response. -export function getListObjectsTransformer() { - return getConcater(xmlParsers.parseListObjects) -} - -// Parses listObjects response. -export function getListObjectsV2Transformer() { - return getConcater(xmlParsers.parseListObjectsV2) -} - -// Parses listObjects with metadata response. 
-export function getListObjectsV2WithMetadataTransformer() { - return getConcater(xmlParsers.parseListObjectsV2WithMetadata) -} - -// Parses completeMultipartUpload response. -export function getCompleteMultipartTransformer() { - return getConcater(xmlParsers.parseCompleteMultipart) -} - -// Parses getBucketLocation response. -export function getBucketRegionTransformer() { - return getConcater(xmlParsers.parseBucketRegion) -} - -// Parses GET/SET BucketNotification response -export function getBucketNotificationTransformer() { - return getConcater(xmlParsers.parseBucketNotification) -} - -// Parses a notification. -export function getNotificationTransformer() { - // This will parse and return each object. - return new JSONParser() -} - -export function bucketVersioningTransformer() { - return getConcater(xmlParsers.parseBucketVersioningConfig) -} - -export function getTagsTransformer() { - return getConcater(xmlParsers.parseTagging) -} - -export function lifecycleTransformer() { - return getConcater(xmlParsers.parseLifecycleConfig) -} - -export function objectLockTransformer() { - return getConcater(xmlParsers.parseObjectLockConfig) -} - -export function objectRetentionTransformer() { - return getConcater(xmlParsers.parseObjectRetentionConfig) -} -export function bucketEncryptionTransformer() { - return getConcater(xmlParsers.parseBucketEncryptionConfig) -} - -export function replicationConfigTransformer() { - return getConcater(xmlParsers.parseReplicationConfig) -} - -export function objectLegalHoldTransformer() { - return getConcater(xmlParsers.parseObjectLegalHoldConfig) -} - -export function uploadPartTransformer() { - return getConcater(xmlParsers.uploadPartParser) -} -export function selectObjectContentTransformer() { - return getConcater() -} - -export function removeObjectsTransformer() { - return getConcater(xmlParsers.removeObjectsParser) -} diff --git a/src/transformers.ts b/src/transformers.ts new file mode 100644 index 00000000..a387b9f8 --- /dev/null 
+++ b/src/transformers.ts @@ -0,0 +1,161 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015, 2016 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import * as crypto from 'node:crypto' +import type { ServerResponse } from 'node:http' +import type * as stream from 'node:stream' + +import Through2 from 'through2' + +import * as errors from './errors.ts' +import { isFunction } from './helpers.ts' +import * as xmlParsers from './xml-parsers.ts' + +// getConcater returns a stream that concatenates the input and emits +// the concatenated output when 'end' has reached. If an optional +// parser function is passed upon reaching the 'end' of the stream, +// `parser(concatenated_data)` will be emitted. 
+export function getConcater(parser?: undefined | ((xml: string) => any), emitError?: boolean): stream.Transform { + let objectMode = false + const bufs: Buffer[] = [] + + if (parser && !isFunction(parser)) { + throw new TypeError('parser should be of type "function"') + } + + if (parser) { + objectMode = true + } + + return Through2( + { objectMode }, + function (chunk, enc, cb) { + bufs.push(chunk) + cb() + }, + function (cb) { + if (emitError) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + cb(parser(Buffer.concat(bufs).toString())) + // cb(e) would mean we have to emit 'end' by explicitly calling this.push(null) + this.push(null) + return + } + if (bufs.length) { + if (parser) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + this.push(parser(Buffer.concat(bufs).toString())) + } else { + this.push(Buffer.concat(bufs)) + } + } + cb() + }, + ) +} + +// Generates an Error object depending on http statusCode and XML body +export function getErrorTransformer(response: ServerResponse) { + const statusCode = response.statusCode + let code: string, message: string + if (statusCode === 301) { + code = 'MovedPermanently' + message = 'Moved Permanently' + } else if (statusCode === 307) { + code = 'TemporaryRedirect' + message = 'Are you using the correct endpoint URL?' + } else if (statusCode === 403) { + code = 'AccessDenied' + message = 'Valid and authorized credentials required' + } else if (statusCode === 404) { + code = 'NotFound' + message = 'Not Found' + } else if (statusCode === 405) { + code = 'MethodNotAllowed' + message = 'Method Not Allowed' + } else if (statusCode === 501) { + code = 'MethodNotAllowed' + message = 'Method Not Allowed' + } else { + code = 'UnknownError' + message = `${statusCode}` + } + + const headerInfo: Record = {} + // A value created by S3 compatible server that uniquely identifies the request. + headerInfo.amzRequestid = response.headersSent ? 
(response.getHeader('x-amz-request-id') as string | undefined) : null + // A special token that helps troubleshoot API replies and issues. + headerInfo.amzId2 = response.headersSent ? (response.getHeader('x-amz-id-2') as string | undefined) : null + // Region where the bucket is located. This header is returned only + // in HEAD bucket and ListObjects response. + headerInfo.amzBucketRegion = response.headersSent + ? (response.getHeader('x-amz-bucket-region') as string | undefined) + : null + + return getConcater((xmlString) => { + const getError = () => { + // Message should be instantiated for each S3Errors. + const e = new errors.S3Error(message, { cause: headerInfo }) + // S3 Error code. + e.code = code + Object.entries(headerInfo).forEach(([key, value]) => { + // @ts-expect-error force set error properties + e[key] = value + }) + return e + } + if (!xmlString) { + return getError() + } + let e + try { + e = xmlParsers.parseError(xmlString, headerInfo) + } catch (ex) { + return getError() + } + return e + }, true) +} + +export function hashBinary(buf: Buffer, enableSHA256: boolean) { + let sha256sum = '' + if (enableSHA256) { + sha256sum = crypto.createHash('sha256').update(buf).digest('hex') + } + const md5sum = crypto.createHash('md5').update(buf).digest('base64') + + return { md5sum, sha256sum } +} + +// Following functions return a stream object that parses XML +// and emits suitable Javascript objects. + +// Parses listMultipartUploads response. +export function getListMultipartTransformer() { + return getConcater(xmlParsers.parseListMultipart) +} + +// Parses listObjects response. +export function getListObjectsV2Transformer() { + return getConcater(xmlParsers.parseListObjectsV2) +} + +// Parses listObjects with metadata response. 
+export function getListObjectsV2WithMetadataTransformer() { + return getConcater(xmlParsers.parseListObjectsV2WithMetadata) +} diff --git a/src/type.ts b/src/type.ts new file mode 100644 index 00000000..d0d9e320 --- /dev/null +++ b/src/type.ts @@ -0,0 +1,239 @@ +import type { Readable as ReadableStream } from 'node:stream' + +export type Binary = string | Buffer +export type RequestHeaders = Record + +export interface IRequest { + protocol: string + port?: number | string + method: string + path: string + headers: RequestHeaders +} + +export type ICanonicalRequest = string + +export interface ICredentials { + accessKey: string + secretKey: string + sessionToken?: string +} + +export type UploadID = string + +export type LockUnit = 'Days' | 'Years' +export type LegalHoldStatus = 'ON' | 'OFF' +export type NoResultCallback = (error: unknown | null) => void +export type ResultCallback = (error: unknown | null, result: T) => void +export type TagList = Record +export type EmptyObject = Record +export type VersionIdentification = { versionId?: string } +export type Lifecycle = LifecycleConfig | null | '' +export type Lock = LockConfig | EmptyObject +export type Encryption = EncryptionConfig | EmptyObject +export type Retention = RetentionOptions | EmptyObject +export type IsoDate = string + +export type GetObjectOpt = { + versionId?: string +} + +export interface BucketItemCopy { + etag: string + lastModified?: Date +} + +export interface BucketItem { + name: string + prefix: string + size: number + etag: string + lastModified: Date +} + +export interface BucketItemWithMetadata extends BucketItem { + metadata: ItemBucketMetadata | ItemBucketMetadataList +} + +export type StatObjectOpts = { + versionId?: string +} + +export interface BucketItemStat { + size: number + etag: string + lastModified: Date + metaData: ItemBucketMetadata + // version id of the object if available + versionId: string | null +} + +export interface IncompleteUploadedBucketItem { + key: string + 
uploadId: string + size: number +} + +export interface BucketStream extends ReadableStream { + on(event: 'data', listener: (item: T) => void): this + + on(event: 'end' | 'pause' | 'readable' | 'resume' | 'close', listener: () => void): this + + on(event: 'error', listener: (err: Error) => void): this + + on(event: string | symbol, listener: (...args: any[]) => void): this +} + +export interface PostPolicyResult { + postURL: string + formData: { + [key: string]: any + } +} + +export interface MetadataItem { + Key: string + Value: string +} + +export interface ItemBucketMetadataList { + Items: MetadataItem[] +} + +export interface ItemBucketMetadata { + [key: string]: any +} + +export interface UploadedObjectInfo { + etag: string + versionId: string | null +} + +export interface Tag { + Key: string + Value: string +} + +export interface LifecycleConfig { + Rule: LifecycleRule[] +} + +export interface LifecycleRule { + [key: string]: any +} + +export interface LockConfig { + objectLockEnabled?: 'Enabled' + mode: Mode + unit: LockUnit + validity: number +} + +export interface EncryptionConfig { + Rule: EncryptionRule[] +} + +export interface EncryptionRule { + [key: string]: any +} + +export interface ReplicationConfig { + role: string + rules: [] +} + +export interface ReplicationConfig { + [key: string]: any +} + +export interface RetentionOptions { + versionId: string + mode?: Mode + retainUntilDate?: IsoDate + governanceBypass?: boolean +} + +export interface LegalHoldOptions { + versionId?: string + status: LegalHoldStatus +} + +export interface InputSerialization { + CompressionType?: 'NONE' | 'GZIP' | 'BZIP2' + CSV?: { + AllowQuotedRecordDelimiter?: boolean + Comments?: string + FieldDelimiter?: string + FileHeaderInfo?: 'NONE' | 'IGNORE' | 'USE' + QuoteCharacter?: string + QuoteEscapeCharacter?: string + RecordDelimiter?: string + } + JSON?: { + Type: 'DOCUMENT' | 'LINES' + } + Parquet?: EmptyObject +} + +export interface OutputSerialization { + CSV?: { + 
FieldDelimiter?: string + QuoteCharacter?: string + QuoteEscapeCharacter?: string + QuoteFields?: string + RecordDelimiter?: string + } + JSON?: { + RecordDelimiter?: string + } +} + +export interface SelectOptions { + expression: string + expressionType?: string + inputSerialization: InputSerialization + outputSerialization: OutputSerialization + requestProgress?: { Enabled: boolean } + scanRange?: { Start: number; End: number } +} + +export interface SourceObjectStats { + size: number + metaData: string + lastModicied: Date + versionId: string + etag: string +} + +export interface MakeBucketOpt { + ObjectLocking?: boolean +} + +export interface RemoveOptions { + versionId?: string + forceDelete?: boolean + governanceBypass?: boolean +} + +export interface BucketItemFromList { + name: string + // date when bucket was created + creationDate: Date +} + +export type VersioningConfig = Record + +export interface VersionConfigInput { + Status?: string + MfaDelete?: string + + [key: string]: any +} + +export type Mode = 'COMPLIANCE' | 'GOVERNANCE' + +export type ListObjectV1Opt = { + Delimiter?: string + MaxKeys?: number + IncludeVersion?: boolean +} diff --git a/src/typed-client.ts b/src/typed-client.ts new file mode 100644 index 00000000..e65c6096 --- /dev/null +++ b/src/typed-client.ts @@ -0,0 +1,1708 @@ +import * as stream from 'node:stream' + +import { TextEncoder } from 'web-encoding' +import xml2js from 'xml2js' + +import { asCallback, asCallbackFn } from './as-callback.ts' +import { fsp } from './async.ts' +import type { RequestMethod, RequestOption } from './client.ts' +import { Client, findCallback } from './client.ts' +import * as errors from './errors.ts' +import type { MetaData, SelectResults } from './helpers.ts' +import { + getScope, + insertContentType, + isArray, + isBoolean, + isEmpty, + isFunction, + isNumber, + isObject, + isOptionalFunction, + isString, + isValidBucketName, + isValidDate, + isValidObjectName, + isValidPrefix, + LEGAL_HOLD_STATUS, + 
makeDateLong, + prependXAMZMeta, + RETENTION_MODES, + toMd5, + uriEscape, +} from './helpers.ts' +import { PostPolicy } from './postPolicy.ts' +import { qs } from './qs.ts' +import { readAsBuffer } from './response.ts' +import { postPresignSignatureV4, presignSignatureV4 } from './signing.ts' +import * as transformers from './transformers.ts' +import type { + BucketStream, + Encryption, + LegalHoldOptions, + Lifecycle, + ListObjectV1Opt, + NoResultCallback, + PostPolicyResult, + RemoveOptions, + RequestHeaders, + ResultCallback, + Retention, + SelectOptions, + Tag, + TagList, + UploadedObjectInfo, + VersionConfigInput, + VersionIdentification, + VersioningConfig, +} from './type.ts' +import type { S3ListObject } from './xml-parsers.ts' +import * as xmlParsers from './xml-parsers.ts' +import { parseSelectObjectContentResponse } from './xml-parsers.ts' + +export class TypedClient extends Client { + getBucketVersioning(bucketName: string, callback: ResultCallback): void + getBucketVersioning(bucketName: string): Promise + + getBucketVersioning(bucketName: string, cb?: ResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + + const method = 'GET' + const query = 'versioning' + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, query }) + const body = await readAsBuffer(res) + return xmlParsers.parseBucketVersioningConfig(body.toString()) + }) + } + + setBucketVersioning(bucketName: string, versioningConfig: VersionConfigInput, callback: NoResultCallback): void + setBucketVersioning(bucketName: string, versioningConfig: VersionConfigInput): Promise + setBucketVersioning( + bucketName: string, + versionConfig: VersionConfigInput, + cb?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if 
(!Object.keys(versionConfig).length) { + throw new errors.InvalidArgumentError('versionConfig should be of type "object"') + } + + const method = 'PUT' + const query = 'versioning' + const builder = new xml2js.Builder({ + rootName: 'VersioningConfiguration', + renderOpts: { pretty: false }, + headless: true, + }) + const payload = builder.buildObject(versionConfig) + + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit({ method, bucketName, query }, payload) + }) + } + + /** + * Set the policy on a bucket or an object prefix. + * + * @param bucketName - name of the bucket + * @param bucketPolicy - bucket policy (JSON stringify'ed) + */ + setBucketPolicy(bucketName: string, bucketPolicy: string): Promise + setBucketPolicy(bucketName: string, bucketPolicy: string, callback: NoResultCallback): void + + setBucketPolicy(bucketName: string, policy: string, cb?: NoResultCallback): void | Promise { + // Validate arguments. + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) + } + if (!isString(policy)) { + throw new errors.InvalidBucketPolicyError(`Invalid bucket policy: ${policy} - must be "string"`) + } + + let method: RequestMethod = 'DELETE' + const query = 'policy' + + if (policy) { + method = 'PUT' + } + + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit( + { + method, + bucketName, + query, + }, + policy, + [204], + '', + ) + }) + } + + /** + * Set the policy on a bucket or an object prefix. + */ + getBucketPolicy(bucketName: string, callback: ResultCallback): void + getBucketPolicy(bucketName: string): Promise + + getBucketPolicy(bucketName: string, cb?: ResultCallback): void | Promise { + // Validate arguments. 
+ if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) + } + + const method = 'GET' + const query = 'policy' + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, query }, '', [200], '') + const body = await readAsBuffer(res) + return body.toString() + }) + } + + /** + * Get Tags associated with a Bucket + */ + getBucketTagging(bucketName: string, callback: ResultCallback): void + getBucketTagging(bucketName: string): Promise + + getBucketTagging(bucketName: string, cb?: ResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) + } + + const method = 'GET' + const query = 'tagging' + const requestOptions: RequestOption = { method, bucketName, query } + + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync(requestOptions) + const body = await readAsBuffer(res) + return xmlParsers.parseTagging(body.toString()) + }) + } + + /** Remove Tags on an Bucket/Object based on params + * __Arguments__ + * bucketName _string_ + * objectName _string_ (optional) + * removeOpts _object_ (Optional) e.g {versionId:"my-object-version-id"}, + * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. 
+ */ + protected async removeTagging({ + bucketName, + objectName, + removeOpts, + }: { + removeOpts?: { versionId?: string } + bucketName: string + objectName?: string + }) { + const method = 'DELETE' + let query = 'tagging' + + if (removeOpts && removeOpts.versionId) { + query = `${query}&versionId=${removeOpts.versionId}` + } + const requestOptions: RequestOption = { method, bucketName, objectName, query } + + if (objectName) { + requestOptions['objectName'] = objectName + } + + await this.makeRequestAsync(requestOptions, '', [200, 204], '') + } + + /** + * Remove Tags associated with a bucket + */ + removeBucketTagging(bucketName: string, callback: NoResultCallback): void + removeBucketTagging(bucketName: string): Promise + + /** Remove Tags associated with a bucket + * __Arguments__ + * bucketName _string_ + * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. + */ + removeBucketTagging(bucketName: string, cb?: NoResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + + return asCallback(cb, this.removeTagging({ bucketName })) + } + + /** + * Set Tags on a Bucket + * + */ + setBucketTagging(bucketName: string, tags: TagList, callback: NoResultCallback): void + setBucketTagging(bucketName: string, tags: TagList): Promise + + setBucketTagging(bucketName: string, tags: TagList, cb?: NoResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isObject(tags)) { + throw new errors.InvalidArgumentError('tags should be of type "object"') + } + if (Object.keys(tags).length > 10) { + throw new errors.InvalidArgumentError('maximum tags allowed is 10"') + } + + return asCallback(cb, this.setTagging({ bucketName, tags })) + } + + getBucketLifecycle(bucketName: string, callback: ResultCallback): 
void + getBucketLifecycle(bucketName: string): Promise + + /** + * Get lifecycle configuration on a bucket. + */ + getBucketLifecycle(bucketName: string, cb?: ResultCallback) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + const method = 'GET' + const query = 'lifecycle' + const requestOptions: RequestOption = { method, bucketName, query } + + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync(requestOptions) + const body = await readAsBuffer(res) + return xmlParsers.parseLifecycleConfig(body.toString()) + }) + } + + removeBucketLifecycle(bucketName: string, callback: NoResultCallback): void + removeBucketLifecycle(bucketName: string): Promise + + /** + * Remove lifecycle configuration of a bucket. + */ + removeBucketLifecycle(bucketName: string, cb?: NoResultCallback) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + const method = 'DELETE' + const query = 'lifecycle' + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit({ method, bucketName, query }, '', [204]) + }) + } + + // presignedPostPolicy can be used in situations where we want more control on the upload than what + // presignedPutObject() provides. i.e Using presignedPostPolicy we will be able to put policy restrictions + + // return PostPolicy object + newPostPolicy() { + return new PostPolicy() + } + + /** + * Put lifecycle configuration on a bucket. + * Apply lifecycle configuration on a bucket. + * + * this method is not documented yet so it's marked as `protected`, ts will not emit it in type definition + * + * @param bucketName + * @param policyConfig - a valid policy configuration object. 
+ */ + protected async applyBucketLifecycle(bucketName: string, policyConfig: Lifecycle): Promise { + const method = 'PUT' + const query = 'lifecycle' + + const encoder = new TextEncoder() + const builder = new xml2js.Builder({ + rootName: 'LifecycleConfiguration', + headless: true, + renderOpts: { pretty: false }, + }) + + const payload = encoder.encode(builder.buildObject(policyConfig)) + const headers: RequestHeaders = { 'Content-MD5': toMd5(payload) } + await this.makeRequestAsyncOmit({ method, bucketName, query, headers }, payload) + } + + /** Set/Override lifecycle configuration on a bucket. if the configuration is empty, it removes the configuration. + * + * @param bucketName + * @param lifecycleConfig - null or empty object will remove bucket life cycle + * @param callback - if no callback, a promise will be returned + */ + setBucketLifecycle(bucketName: string, lifecycleConfig: Lifecycle | null, callback: NoResultCallback): void + setBucketLifecycle(bucketName: string, lifecycleConfig: Lifecycle | null): Promise + + setBucketLifecycle(bucketName: string, lifeCycleConfig: Lifecycle | null = null, cb?: NoResultCallback) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + return asCallbackFn(cb, async () => { + if (isEmpty(lifeCycleConfig)) { + await this.removeBucketLifecycle(bucketName) + } else { + await this.applyBucketLifecycle(bucketName, lifeCycleConfig) + } + }) + } + + // List the objects in the bucket. + // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `prefix` _string_: the prefix of the objects that should be listed (optional, default `''`) + // * `recursive` _bool_: `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. 
(optional, default `false`) + // * `listOpts _object_: query params to list object with below keys + // * listOpts.MaxKeys _int_ maximum number of keys to return + // * listOpts.IncludeVersion _bool_ true|false to include versions. + // __Return Value__ + // * `stream` _Stream_: stream emitting the objects in the bucket, the object is of the format: + // * `obj.name` _string_: name of the object + // * `obj.prefix` _string_: name of the object prefix + // * `obj.size` _number_: size of the object + // * `obj.etag` _string_: etag of the object + // * `obj.lastModified` _Date_: modified time stamp + // * `obj.isDeleteMarker` _boolean_: true if it is a delete marker + + listObjects( + bucketName: string, + prefix: string, + recursive: boolean, + listOpts: { + MaxKeys?: number + IncludeVersion?: boolean + } = {}, + ): BucketStream { + if (prefix === undefined) { + prefix = '' + } + if (recursive === undefined) { + recursive = false + } + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidPrefix(prefix)) { + throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) + } + if (!isString(prefix)) { + throw new TypeError('prefix should be of type "string"') + } + if (!isBoolean(recursive)) { + throw new TypeError('recursive should be of type "boolean"') + } + if (!isObject(listOpts)) { + throw new TypeError('listOpts should be of type "object"') + } + const listQueryOpts = { + Delimiter: recursive ? 
'' : '/', // if recursive is false set delimiter to '/' + MaxKeys: 1000, + IncludeVersion: listOpts.IncludeVersion, + } + let objects: S3ListObject[] = [] + let ended = false + const readStream = new stream.Readable({ objectMode: true }) + + let marker = '' + // eslint-disable-next-line @typescript-eslint/no-misused-promises + readStream._read = async () => { + // push one object per _read() + if (objects.length) { + readStream.push(objects.shift()) + return + } + if (ended) { + return readStream.push(null) + } + + try { + const result = await this.listObjectsQuery(bucketName, prefix, marker, listQueryOpts) + while (!ended) { + if (result.isTruncated) { + marker = result.nextMarker || (result.versionIdMarker as string) + } else { + ended = true + } + objects = result.objects + // @ts-expect-error next read + readStream._read() + } + } catch (e) { + readStream.emit('error', e) + } + } + + return readStream + } + + // list a batch of objects + protected async listObjectsQuery( + bucketName: string, + prefix: string, + marker: string, + { + Delimiter, + MaxKeys, + IncludeVersion, + }: Partial> & Required>, + ) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isString(prefix)) { + throw new TypeError('prefix should be of type "string"') + } + if (!isString(marker)) { + throw new TypeError('marker should be of type "string"') + } + + if (!isString(Delimiter)) { + throw new TypeError('Delimiter should be of type "string"') + } + if (!isNumber(MaxKeys)) { + throw new TypeError('MaxKeys should be of type "number"') + } + + const queries = [] + // escape every value in query string, except maxKeys + queries.push(`prefix=${uriEscape(prefix)}`) + queries.push(`delimiter=${uriEscape(Delimiter)}`) + queries.push(`encoding-type=url`) + + if (IncludeVersion) { + queries.push(`versions`) + } + + if (marker) { + marker = uriEscape(marker) + if (IncludeVersion) { + 
queries.push(`key-marker=${marker}`) + } else { + queries.push(`marker=${marker}`) + } + } + + // no need to escape maxKeys + if (MaxKeys) { + if (MaxKeys >= 1000) { + MaxKeys = 1000 + } + queries.push(`max-keys=${MaxKeys}`) + } + queries.sort() + let query = '' + if (queries.length > 0) { + query = `${queries.join('&')}` + } + + const method = 'GET' + + const res = await this.makeRequestAsync({ method, bucketName, query }) + const body = await readAsBuffer(res) + + return xmlParsers.parseListObjects(body.toString()) + } + + putObjectRetention(bucketName: string, objectName: string, callback: NoResultCallback): void + putObjectRetention( + bucketName: string, + objectName: string, + retentionOptions: Retention, + callback: NoResultCallback, + ): void + putObjectRetention(bucketName: string, objectName: string, retentionOptions?: Retention): Promise + + putObjectRetention( + bucketName: string, + objectName: string, + retentionOptsOrCallback?: Retention | NoResultCallback, + callback?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + let retentionOpts: Retention = {} + let cb: undefined | NoResultCallback + if (isFunction(retentionOptsOrCallback)) { + cb = retentionOptsOrCallback + } else { + retentionOpts = retentionOptsOrCallback as Retention + cb = callback + } + + if (!isObject(retentionOpts)) { + throw new errors.InvalidArgumentError('retentionOpts should be of type "object"') + } else { + if (retentionOpts.governanceBypass && !isBoolean(retentionOpts.governanceBypass)) { + throw new errors.InvalidArgumentError(`Invalid value for governanceBypass: ${retentionOpts.governanceBypass}`) + } + if ( + retentionOpts.mode && + ![RETENTION_MODES.COMPLIANCE, RETENTION_MODES.GOVERNANCE].includes(retentionOpts.mode) + ) { + throw new 
errors.InvalidArgumentError(`Invalid object retention mode: ${retentionOpts.mode}`) + } + if (retentionOpts.retainUntilDate && !isString(retentionOpts.retainUntilDate)) { + throw new errors.InvalidArgumentError(`Invalid value for retainUntilDate: ${retentionOpts.retainUntilDate}`) + } + if (retentionOpts.versionId && !isString(retentionOpts.versionId)) { + throw new errors.InvalidArgumentError(`Invalid value for versionId: ${retentionOpts.versionId}`) + } + } + + const method = 'PUT' + let query = 'retention' + + const headers: RequestHeaders = {} + if (retentionOpts.governanceBypass) { + headers['X-Amz-Bypass-Governance-Retention'] = true + } + + const builder = new xml2js.Builder({ rootName: 'Retention', renderOpts: { pretty: false }, headless: true }) + const params: Record = {} + + if (retentionOpts.mode) { + params.Mode = retentionOpts.mode + } + if (retentionOpts.retainUntilDate) { + params.RetainUntilDate = retentionOpts.retainUntilDate + } + if (retentionOpts.versionId) { + query += `&versionId=${retentionOpts.versionId}` + } + + const payload = builder.buildObject(params) + + headers['Content-MD5'] = toMd5(payload) + + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit( + { + method, + bucketName, + objectName, + query, + headers, + }, + payload, + [200, 204], + ) + }) + } + + getBucketEncryption(bucketName: string, callback: ResultCallback): void + getBucketEncryption(bucketName: string): Promise + getBucketEncryption(bucketName: string, cb?: ResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isOptionalFunction(cb)) { + throw new errors.InvalidArgumentError('callback should be of type "function"') + } + const method = 'GET' + const query = 'encryption' + + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, query }) + const body = await readAsBuffer(res) + return 
xmlParsers.parseBucketEncryptionConfig(body.toString()) + }) + } + + setBucketEncryption(bucketName: string, encryptionConfig: Encryption, callback: NoResultCallback): void + setBucketEncryption(bucketName: string, encryptionConfig: Encryption): Promise + setBucketEncryption( + bucketName: string, + encryptionConfigOrCallback: Encryption | NoResultCallback | undefined, + callback?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + + let encryptionConfig: Encryption | undefined + let cb: NoResultCallback | undefined + + if (isFunction(encryptionConfigOrCallback)) { + cb = encryptionConfigOrCallback + encryptionConfig = undefined + } else { + encryptionConfig = encryptionConfigOrCallback + cb = callback + } + + if (!isEmpty(encryptionConfig) && encryptionConfig.Rule.length > 1) { + throw new errors.InvalidArgumentError('Invalid Rule length. Only one rule is allowed: ' + encryptionConfig.Rule) + } + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + let encryptionObj = encryptionConfig + if (isEmpty(encryptionConfig)) { + encryptionObj = { + // Default MinIO Server Supported Rule + Rule: [ + { + ApplyServerSideEncryptionByDefault: { + SSEAlgorithm: 'AES256', + }, + }, + ], + } + } + + const method = 'PUT' + const query = 'encryption' + const builder = new xml2js.Builder({ + rootName: 'ServerSideEncryptionConfiguration', + renderOpts: { pretty: false }, + headless: true, + }) + const payload = builder.buildObject(encryptionObj) + + const headers: RequestHeaders = {} + headers['Content-MD5'] = toMd5(payload) + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit( + { + method, + bucketName, + query, + headers, + }, + payload, + ) + }) + } + + /** + * Remove the specified object. 
+ */ + removeObject(bucketName: string, objectName: string, removeOpts: RemoveOptions, callback: NoResultCallback): void + removeObject(bucketName: string, objectName: string, callback: NoResultCallback): void + removeObject(bucketName: string, objectName: string, removeOpts?: RemoveOptions): Promise + removeObject( + bucketName: string, + objectName: string, + removeOptsOrCallback: RemoveOptions | NoResultCallback = {}, + callback?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + let removeOpts: RemoveOptions = {} + let cb: NoResultCallback | undefined + + // backward compatibility + if (isFunction(removeOptsOrCallback)) { + cb = removeOptsOrCallback + } else { + removeOpts = removeOptsOrCallback + cb = callback + } + + if (!isObject(removeOpts)) { + throw new errors.InvalidArgumentError('removeOpts should be of type "object"') + } + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + const method = 'DELETE' + const queryParams: Record = {} + + if (removeOpts.versionId) { + queryParams.versionId = `${removeOpts.versionId}` + } + const headers: RequestHeaders = {} + if (removeOpts.governanceBypass) { + headers['X-Amz-Bypass-Governance-Retention'] = true + } + if (removeOpts.forceDelete) { + headers['x-minio-force-delete'] = true + } + + const query = qs(queryParams) + + const requestOptions: RequestOption = { method, bucketName, objectName, headers } + if (query) { + requestOptions['query'] = query + } + + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit(requestOptions, '', [200, 204]) + }) + } + + /** + * Generate a generic pre-signed URL which can be used for HTTP methods GET, PUT, HEAD and DELETE + * + * @param httpMethod - name of the HTTP method + * 
@param bucketName - name of the bucket + * @param objectName - name of the object + * @param expires - expiry in seconds (optional, default 7 days) + * @param reqParams - request parameters (optional) e.g {versionId:"10fa9946-3f64-4137-a58f-888065c0732e"} + * @param requestDate - A date object, the url will be issued at (optional) + */ + presignedUrl( + httpMethod: string, + bucketName: string, + objectName: string, + expires?: number, + reqParams?: Record, + requestDate?: Date, + ): Promise + + presignedUrl(httpMethod: string, bucketName: string, objectName: string, callback: ResultCallback): void + presignedUrl( + httpMethod: string, + bucketName: string, + objectName: string, + expiry: number, + callback: ResultCallback, + ): void + presignedUrl( + httpMethod: string, + bucketName: string, + objectName: string, + expiry: number, + reqParams: Record, + callback: ResultCallback, + ): void + presignedUrl( + httpMethod: string, + bucketName: string, + objectName: string, + expiry: number, + reqParams: Record, + requestDate: Date, + callback: ResultCallback, + ): void + + presignedUrl( + method: 'GET' | 'DELETE' | 'PUT' | 'POST', + bucketName: string, + objectName: string, + // expires?: number, + // reqParams?: Record, + // requestDate?: Date, + // callback?: ResultCallback, + ...originalArgs: unknown[] + ): void | Promise { + if (this.anonymous) { + throw new errors.AnonymousRequestError('Presigned ' + method + ' url cannot be generated for anonymous requests') + } + + let [[expires, reqParams, requestDate], cb] = findCallback< + [number, Record, Date], + ResultCallback + >(originalArgs) + + expires = expires ?? 24 * 60 * 60 * 7 // 7 days in seconds + reqParams = reqParams ?? {} + requestDate = requestDate ?? 
new Date() + + if (!isNumber(expires)) { + throw new TypeError(`expires should be of type "number", got ${expires}`) + } + if (!isObject(reqParams)) { + throw new TypeError(`reqParams should be of type "object", got ${reqParams}`) + } + if (!isValidDate(requestDate)) { + throw new TypeError(`requestDate should be of type "Date" and valid, got ${requestDate}`) + } + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + const query = qs(reqParams) + return asCallbackFn(cb, async () => { + const region = await this.getBucketRegionAsync(bucketName) + + const reqOptions = this.getRequestOptions({ method, region, bucketName, objectName, query }) + void this.checkAndRefreshCreds() + return presignSignatureV4( + reqOptions, + this.accessKey, + this.secretKey, + this.sessionToken!, + region, + requestDate, + expires, + ) + }) + } + + /** + * Generate a presigned URL for GET + * + * @param bucketName - name of the bucket + * @param objectName - name of the object + * @param expires - expiry in seconds (optional, default 7 days) + * @param respHeaders - response headers to override or request params for query (optional) e.g {versionId:"10fa9946-3f64-4137-a58f-888065c0732e"} + * @param requestDate - A date object, the url will be issued at (optional) + */ + presignedGetObject( + bucketName: string, + objectName: string, + expires?: number, + respHeaders?: Record, + requestDate?: Date, + ): Promise + + presignedGetObject(bucketName: string, objectName: string, callback: ResultCallback): void + presignedGetObject(bucketName: string, objectName: string, expires: number, callback: ResultCallback): void + presignedGetObject( + bucketName: string, + objectName: string, + expires: number, + respHeaders: Record, + callback: ResultCallback, + ): void + presignedGetObject( + bucketName: string, + objectName: string, + expires: number, + respHeaders: Record, + requestDate: Date, + callback: ResultCallback, + ): void + + presignedGetObject( + 
bucketName: string, + objectName: string, + expires?: unknown, + respHeaders?: unknown, + requestDate?: unknown, + cb?: unknown, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + if (isFunction(respHeaders)) { + cb = respHeaders + respHeaders = {} + requestDate = new Date() + } + + const validRespHeaders = [ + 'response-content-type', + 'response-content-language', + 'response-expires', + 'response-cache-control', + 'response-content-disposition', + 'response-content-encoding', + ] + validRespHeaders.forEach((header) => { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + if (respHeaders !== undefined && respHeaders[header] !== undefined && !isString(respHeaders[header])) { + throw new TypeError(`response header ${header} should be of type "string"`) + } + }) + + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore presignedUrl will check type values, just leave it here for future refactor. 
+ return this.presignedUrl('GET', bucketName, objectName, expires as number, respHeaders, requestDate as Date, cb) + } + + presignedPutObject(bucketName: string, objectName: string, callback: ResultCallback): void + presignedPutObject(bucketName: string, objectName: string, expiry: number, callback: ResultCallback): void + presignedPutObject(bucketName: string, objectName: string, expiry?: number): Promise + + // * `expiry` _number_: expiry in seconds (optional, default 7 days) + presignedPutObject( + bucketName: string, + objectName: string, + expires?: number | ResultCallback, + cb?: ResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + return this.presignedUrl('PUT', bucketName, objectName, expires as number, cb) + } + + presignedPostPolicy(policy: PostPolicy, callback: ResultCallback): void + presignedPostPolicy(policy: PostPolicy): Promise + presignedPostPolicy(postPolicy: PostPolicy, cb?: ResultCallback): void | Promise { + return asCallbackFn(cb, async () => { + if (this.anonymous) { + throw new errors.AnonymousRequestError('Presigned POST policy cannot be generated for anonymous requests') + } + if (!isObject(postPolicy)) { + throw new TypeError('postPolicy should be of type "object"') + } + if (!isOptionalFunction(cb)) { + throw new TypeError('cb should be of type "function"') + } + // @ts-expect-error index check + const region = await this.getBucketRegionAsync(postPolicy.formData.bucket) + const date = new Date() + const dateStr = makeDateLong(date) + void this.checkAndRefreshCreds() + + if (!postPolicy.policy.expiration) { + // 'expiration' is mandatory field for S3. + // Set default expiration date of 7 days. 
+ const expires = new Date() + expires.setSeconds(24 * 60 * 60 * 7) + postPolicy.setExpires(expires) + } + + postPolicy.policy.conditions.push(['eq', '$x-amz-date', dateStr]) + postPolicy.formData['x-amz-date'] = dateStr + + postPolicy.policy.conditions.push(['eq', '$x-amz-algorithm', 'AWS4-HMAC-SHA256']) + postPolicy.formData['x-amz-algorithm'] = 'AWS4-HMAC-SHA256' + + postPolicy.policy.conditions.push(['eq', '$x-amz-credential', this.accessKey + '/' + getScope(region, date)]) + postPolicy.formData['x-amz-credential'] = this.accessKey + '/' + getScope(region, date) + + if (this.sessionToken) { + postPolicy.policy.conditions.push(['eq', '$x-amz-security-token', this.sessionToken]) + postPolicy.formData['x-amz-security-token'] = this.sessionToken + } + + const policyBase64 = Buffer.from(JSON.stringify(postPolicy.policy)).toString('base64') + + postPolicy.formData.policy = policyBase64 + + postPolicy.formData['x-amz-signature'] = postPresignSignatureV4(region, date, this.secretKey, policyBase64) + const opts: RequestOption = { method: 'POST', region: region, bucketName: postPolicy.formData.bucket } + const reqOptions = this.getRequestOptions(opts) + const portStr = this.port == 80 || this.port === 443 ? 
'' : `:${this.port.toString()}` + const urlStr = `${reqOptions.protocol}//${reqOptions.host}${portStr}${reqOptions.path}` + return { postURL: urlStr, formData: postPolicy.formData } + }) + } + + setObjectTagging(bucketName: string, objectName: string, tags: TagList, callback: NoResultCallback): void + setObjectTagging( + bucketName: string, + objectName: string, + tags: TagList, + putOptions: VersionIdentification, + callback: NoResultCallback, + ): void + setObjectTagging( + bucketName: string, + objectName: string, + tags: TagList, + putOptions?: VersionIdentification, + ): Promise + + /** Set Tags on an Object + * __Arguments__ + * bucketName _string_ + * objectName _string_ + * * tags _object_ of the form {'':'','':''} + * putOpts _object_ (Optional) e.g {versionId:"my-object-version-id"}, + * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. + */ + setObjectTagging( + bucketName: string, + objectName: string, + tagsArg: TagList, + putOptsArg?: VersionIdentification | NoResultCallback, + cbArg?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidBucketNameError('Invalid object name: ' + objectName) + } + + let [[tags, putOpts], cb] = findCallback<[TagList, VersionIdentification?], NoResultCallback>([ + tagsArg, + putOptsArg, + cbArg, + ]) + putOpts = putOpts ?? 
{} + + if (!isObject(tags)) { + throw new errors.InvalidArgumentError('tags should be of type "object"') + } + if (Object.keys(tags).length > 10) { + throw new errors.InvalidArgumentError('Maximum tags allowed is 10"') + } + + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + return asCallback(cb, this.setTagging({ bucketName, objectName, tags, putOpts })) + } + + /** To set Tags on a bucket or object based on the params + * __Arguments__ + * taggingParams _object_ Which contains the following properties + * bucketName _string_, + * objectName _string_ (Optional), + * tags _object_ of the form {'':'','':''} + * putOpts _object_ (Optional) e.g {versionId:"my-object-version-id"}, + * cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. + */ + async setTagging({ + bucketName, + objectName, + putOpts = {}, + tags, + }: { + tags: TagList + putOpts?: VersionIdentification + bucketName: string + objectName?: string + }): Promise { + const method = 'PUT' + let query = 'tagging' + + if (putOpts && putOpts.versionId) { + query = `${query}&versionId=${putOpts.versionId}` + } + const tagsList = [] + for (const [key, value] of Object.entries(tags)) { + tagsList.push({ Key: key, Value: value }) + } + const taggingConfig = { + Tagging: { + TagSet: { + Tag: tagsList, + }, + }, + } + const encoder = new TextEncoder() + const headers: RequestHeaders = {} + const builder = new xml2js.Builder({ headless: true, renderOpts: { pretty: false } }) + const payload = encoder.encode(builder.buildObject(taggingConfig)) + headers['Content-MD5'] = toMd5(payload) + const requestOptions: RequestOption = { method, bucketName, query, headers } + + if (objectName) { + requestOptions['objectName'] = objectName + } + headers['Content-MD5'] = toMd5(payload) + + await this.makeRequestAsyncOmit(requestOptions, payload) + } + + removeObjectTagging(bucketName: string, objectName: string, 
callback: NoResultCallback): void + removeObjectTagging( + bucketName: string, + objectName: string, + removeOptions: VersionIdentification, + callback: NoResultCallback, + ): void + removeObjectTagging(bucketName: string, objectName: string, removeOptions?: VersionIdentification): Promise + + /** Remove tags associated with an object + * __Arguments__ + * bucketName _string_ + * objectName _string_ + * removeOpts _object_ (Optional) e.g. {VersionID:"my-object-version-id"} + * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. + */ + removeObjectTagging( + bucketName: string, + objectName: string, + removeOptsArg?: VersionIdentification | NoResultCallback, + cbArg?: NoResultCallback, + ): Promise | void { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidBucketNameError('Invalid object name: ' + objectName) + } + + const [[removeOpts], cb] = findCallback<[VersionIdentification?], NoResultCallback>([removeOptsArg, cbArg]) + if (removeOpts && Object.keys(removeOpts).length && !isObject(removeOpts)) { + throw new errors.InvalidArgumentError('removeOpts should be of type "object"') + } + + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + return asCallback(cb, this.removeTagging({ bucketName, objectName, removeOpts })) + } + + selectObjectContent( + bucketName: string, + objectName: string, + selectOpts: SelectOptions, + callback: ResultCallback, + ): void + selectObjectContent(bucketName: string, objectName: string, selectOpts: SelectOptions): Promise + + selectObjectContent( + bucketName: string, + objectName: string, + selectOpts: SelectOptions, + cb?: ResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) 
+ } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isEmpty(selectOpts)) { + if (!isString(selectOpts.expression)) { + throw new TypeError('sqlExpression should be of type "string"') + } + if (!isEmpty(selectOpts.inputSerialization)) { + if (!isObject(selectOpts.inputSerialization)) { + throw new TypeError('inputSerialization should be of type "object"') + } + } else { + throw new TypeError('inputSerialization is required') + } + if (!isEmpty(selectOpts.outputSerialization)) { + if (!isObject(selectOpts.outputSerialization)) { + throw new TypeError('outputSerialization should be of type "object"') + } + } else { + throw new TypeError('outputSerialization is required') + } + } else { + throw new TypeError('valid select configuration is required') + } + + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + const method = 'POST' + let query = `select` + query += '&select-type=2' + + const config: unknown[] = [ + { + Expression: selectOpts.expression, + }, + { + ExpressionType: selectOpts.expressionType || 'SQL', + }, + { + InputSerialization: [selectOpts.inputSerialization], + }, + { + OutputSerialization: [selectOpts.outputSerialization], + }, + ] + + // Optional + if (selectOpts.requestProgress) { + config.push({ RequestProgress: selectOpts.requestProgress }) + } + // Optional + if (selectOpts.scanRange) { + config.push({ ScanRange: selectOpts.scanRange }) + } + + const builder = new xml2js.Builder({ + rootName: 'SelectObjectContentRequest', + renderOpts: { pretty: false }, + headless: true, + }) + const payload = builder.buildObject(config) + + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, objectName, query }, payload) + return parseSelectObjectContentResponse(await readAsBuffer(res)) + }) + } + + getObjectRetention( + bucketName: string, + objectName: string, + options: 
VersionIdentification, + callback: ResultCallback, + ): void + getObjectRetention(bucketName: string, objectName: string, options: VersionIdentification): Promise + + getObjectRetention( + bucketName: string, + objectName: string, + getOpts: VersionIdentification, + cb?: ResultCallback, + ): Promise | void { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isObject(getOpts)) { + throw new errors.InvalidArgumentError('callback should be of type "object"') + } else if (getOpts.versionId && !isString(getOpts.versionId)) { + throw new errors.InvalidArgumentError('VersionID should be of type "string"') + } + if (cb && !isFunction(cb)) { + throw new errors.InvalidArgumentError('callback should be of type "function"') + } + const method = 'GET' + let query = 'retention' + if (getOpts.versionId) { + query += `&versionId=${getOpts.versionId}` + } + + return asCallbackFn(cb, async (): Promise => { + const res = await this.makeRequestAsync({ method, bucketName, objectName, query }) + const body = await readAsBuffer(res) + return xmlParsers.parseObjectRetentionConfig(body.toString()) + }) + } + + getObjectTagging(bucketName: string, objectName: string, callback: ResultCallback): void + getObjectTagging( + bucketName: string, + objectName: string, + getOptions: VersionIdentification, + callback: ResultCallback, + ): void + getObjectTagging(bucketName: string, objectName: string, getOptions?: VersionIdentification): Promise + + getObjectTagging( + bucketName: string, + objectName: string, + getOptsArg?: VersionIdentification | ResultCallback, + cbArg?: ResultCallback, + ): void | Promise { + const method = 'GET' + let query = 'tagging' + + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if 
(!isValidObjectName(objectName)) { + throw new errors.InvalidBucketNameError('Invalid object name: ' + objectName) + } + + const [[getOpts = {}], cb] = findCallback<[VersionIdentification | undefined], ResultCallback>([ + getOptsArg, + cbArg, + ]) + + if (!isObject(getOpts)) { + throw new errors.InvalidArgumentError('getOpts should be of type "object"') + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + if (getOpts && getOpts.versionId) { + query = `${query}&versionId=${getOpts.versionId}` + } + const requestOptions: RequestOption = { method, bucketName, query } + if (objectName) { + requestOptions['objectName'] = objectName + } + + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync(requestOptions) + const body = await readAsBuffer(res) + return xmlParsers.parseTagging(body.toString()) + }) + } + + getObjectLegalHold(bucketName: string, objectName: string, callback: ResultCallback): void + getObjectLegalHold( + bucketName: string, + objectName: string, + getOptions: VersionIdentification, + callback: ResultCallback, + ): void + getObjectLegalHold( + bucketName: string, + objectName: string, + getOptions?: VersionIdentification, + ): Promise + + getObjectLegalHold( + bucketName: string, + objectName: string, + getOptsArg?: VersionIdentification | ResultCallback, + cbArg?: ResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + const [[getOpts = {}], cb] = findCallback<[VersionIdentification], ResultCallback>([ + getOptsArg, + cbArg, + ]) + + if (!isObject(getOpts)) { + throw new TypeError('getOpts should be of type "Object"') + } else if (Object.keys(getOpts).length > 0 && getOpts.versionId && !isString(getOpts.versionId)) { + throw new TypeError('versionId 
should be of type string.:', getOpts.versionId) + } + + if (!isFunction(cb)) { + throw new errors.InvalidArgumentError('callback should be of type "function"') + } + + const method = 'GET' + let query = 'legal-hold' + + if (getOpts.versionId) { + query += `&versionId=${getOpts.versionId}` + } + + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, objectName, query }) + const body = await readAsBuffer(res) + return xmlParsers.parseObjectLegalHoldConfig(body.toString()) + }) + } + + setObjectLegalHold(bucketName: string, objectName: string, callback: NoResultCallback): void + setObjectLegalHold( + bucketName: string, + objectName: string, + setOptions: LegalHoldOptions, + callback: NoResultCallback, + ): void + setObjectLegalHold(bucketName: string, objectName: string, setOptions?: LegalHoldOptions): Promise + + setObjectLegalHold( + bucketName: string, + objectName: string, + setOptions?: LegalHoldOptions | NoResultCallback, + callback?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + const defaultOpts: LegalHoldOptions = { + status: LEGAL_HOLD_STATUS.ENABLED, + } + + let [[setOpts = defaultOpts], cb] = findCallback<[LegalHoldOptions], NoResultCallback>([setOptions, callback]) + + if (!isObject(setOpts)) { + throw new TypeError('setOpts should be of type "Object"') + } else { + if (![LEGAL_HOLD_STATUS.ENABLED, LEGAL_HOLD_STATUS.DISABLED].includes(setOpts.status)) { + throw new TypeError('Invalid status: ' + setOpts.status) + } + if (setOpts.versionId && !setOpts.versionId.length) { + throw new TypeError('versionId should be of type string.:' + setOpts.versionId) + } + } + + if (!isOptionalFunction(cb)) { + throw new errors.InvalidArgumentError('callback should be of type 
"function"') + } + + if (isEmpty(setOpts)) { + setOpts = defaultOpts + } + + const method = 'PUT' + let query = 'legal-hold' + + if (setOpts.versionId) { + query += `&versionId=${setOpts.versionId}` + } + + const config = { + Status: setOpts.status, + } + + const builder = new xml2js.Builder({ rootName: 'LegalHold', renderOpts: { pretty: false }, headless: true }) + const payload = builder.buildObject(config) + const headers = { + 'Content-MD5': toMd5(payload), + } + + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit( + { + method, + bucketName, + objectName, + query, + headers, + }, + payload, + ) + }) + } + + /** + * Internal Method to abort a multipart upload request in case of any errors. + * @param bucketName __string__ Bucket Name + * @param objectName __string__ Object Name + * @param uploadId __string__ id of a multipart upload to cancel during compose object sequence. + */ + protected async abortMultipartUpload(bucketName: string, objectName: string, uploadId: string) { + // TODO: type callback + const method = 'DELETE' + const query = `uploadId=${uploadId}` + + const requestOptions: RequestOption = { method, bucketName, objectName: objectName, query } + await this.makeRequestAsyncOmit(requestOptions, '', [204]) + } + + removeObjects( + bucketName: string, + objectsList: Array< + | string + | { + name: string + versionId?: string + } + >, + callback: NoResultCallback, + ): void + removeObjects( + bucketName: string, + objectsList: Array< + | string + | { + name: string + versionId?: string + } + >, + ): Promise + + removeObjects( + bucketName: string, + objectsList: Array< + | string + | { + name: string + versionId?: string + } + >, + cb?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isArray(objectsList)) { + throw new errors.InvalidArgumentError('objectsList should be a list') + } + if 
(!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + const maxEntries = 1000 + const query = 'delete' + const method = 'POST' + + type O = + | string + | { + name: string + versionId?: string + } + + const result = objectsList.reduce( + (result, entry) => { + result.list.push(entry) + if (result.list.length === maxEntries) { + result.listOfList.push(result.list) + result.list = [] + } + return result + }, + { listOfList: [] as O[][], list: [] as O[] }, + ) + + if (result.list.length > 0) { + result.listOfList.push(result.list) + } + + return asCallbackFn(cb, async () => { + for (const list of result.listOfList) { + const objects: { Key: string; VersionId?: string }[] = [] + list.forEach(function (value) { + if (typeof value === 'string') { + objects.push({ Key: value }) + } else { + objects.push({ Key: value.name, VersionId: value.versionId }) + } + }) + const deleteObjects = { Delete: { Quiet: true, Object: objects } } + const builder = new xml2js.Builder({ headless: true }) + const payload = new TextEncoder().encode(builder.buildObject(deleteObjects)) + const headers = { + ['Content-MD5']: toMd5(payload), + } + + await this.makeRequestAsyncOmit( + { + method, + bucketName, + query, + headers, + }, + payload, + ) + } + }) + } +} + +export class Helper { + constructor(private readonly client: Client) {} + + async MultipleFileUpload( + bucketName: string, + objectName: string, + filePath: string, + metaData: MetaData = {}, + ): Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + if (!isString(filePath)) { + throw new TypeError('filePath should be of type "string"') + } + + if (!isObject(metaData)) { + throw new TypeError('metaData should be of type "object"') + } + + // Inserts correct `content-type` attribute 
based on metaData and filePath + metaData = insertContentType(metaData, filePath) + + // Updates metaData to have the correct prefix if needed + metaData = prependXAMZMeta(metaData) + type Part = { + part: number + etag: string + } + + const executor = async (fd: number) => { + const stats = await fsp.fstat(fd) + const fileSize = stats.size + if (fileSize > this.client.maxObjectSize) { + throw new Error(`${filePath} size : ${stats.size}, max allowed size: 5TB`) + } + + if (fileSize <= this.client.partSize) { + // simple PUT request, no multipart + const uploader = this.client.getUploader(bucketName, objectName, metaData, false) + const buf = await fsp.readfile(fd) + const { md5sum, sha256sum } = transformers.hashBinary(buf, this.client.enableSHA256) + return await uploader(buf, fileSize, sha256sum, md5sum) + } + + const previousUploadId = await this.client.findUploadId(bucketName, objectName) + let eTags: Part[] = [] + // if there was a previous incomplete upload, fetch all its uploaded parts info + let uploadId: string + if (previousUploadId) { + eTags = await this.client.listParts(bucketName, objectName, previousUploadId) + uploadId = previousUploadId + } else { + // there was no previous upload, initiate a new one + uploadId = await this.client.initiateNewMultipartUpload(bucketName, objectName, metaData) + } + + { + const partSize = this.client.calculatePartSize(fileSize) + const uploader = this.client.getUploader(bucketName, objectName, metaData, true) + // convert array to object to make things easy + const parts = eTags.reduce(function (acc, item) { + if (!acc[item.part]) { + acc[item.part] = item + } + return acc + }, {} as Record) + const partsDone: { part: number; etag: string }[] = [] + let partNumber = 1 + let uploadedSize = 0 + + // will be reused for hashing and uploading + // don't worry it's "unsafe", we will read data from fs to fill it + const buf = Buffer.allocUnsafe(this.client.partSize) + while (uploadedSize < fileSize) { + const part = 
parts[partNumber] + let length = partSize + if (length > fileSize - uploadedSize) { + length = fileSize - uploadedSize + } + + await fsp.read(fd, buf, 0, length, 0) + const { md5sum, sha256sum } = transformers.hashBinary(buf.subarray(0, length), this.client.enableSHA256) + + const md5sumHex = Buffer.from(md5sum, 'base64').toString('hex') + + if (part && md5sumHex === part.etag) { + // md5 matches, chunk already uploaded + partsDone.push({ part: partNumber, etag: part.etag }) + partNumber++ + uploadedSize += length + continue + } + + const objInfo = await uploader(uploadId, partNumber, buf.subarray(0, length), length, sha256sum, md5sum) + partsDone.push({ part: partNumber, etag: objInfo.etag }) + partNumber++ + uploadedSize += length + } + eTags = partsDone + } + + // at last, finish uploading + return this.client.completeMultipartUpload(bucketName, objectName, uploadId, eTags) + } + + const ensureFileClose = async (executor: (fd: number) => Promise) => { + let fd + try { + fd = await fsp.open(filePath, 'r') + } catch (e) { + throw new Error(`failed to open file ${filePath}: err ${e}`, { cause: e }) + } + + try { + // make sure to keep await, otherwise file will be closed early. 
+ return await executor(fd) + } finally { + await fsp.fclose(fd) + } + } + + return ensureFileClose(executor) + } +} diff --git a/src/typed-client2.ts b/src/typed-client2.ts new file mode 100644 index 00000000..dceb2734 --- /dev/null +++ b/src/typed-client2.ts @@ -0,0 +1,941 @@ +import * as stream from 'node:stream' + +import async from 'async' +import _ from 'lodash' +import xml2js from 'xml2js' + +import { asCallback, asCallbackFn } from './as-callback.ts' +import { fsp } from './async.ts' +import type { RequestOption } from './client.ts' +import { findCallback, uploadStream } from './client.ts' +import { CopyConditions } from './copyConditions.ts' +import * as errors from './errors.ts' +import type { MetaData } from './helpers.ts' +import { + calculateEvenSplits, + CopyDestinationOptions, + CopySourceOptions, + extractMetadata, + getSourceVersionId, + getVersionId, + isArray, + isBoolean, + isEmpty, + isFunction, + isNumber, + isObject, + isOptionalFunction, + isReadableStream, + isString, + isValidBucketName, + isValidObjectName, + isValidPrefix, + PART_CONSTRAINTS, + partsRequired, + pipesetup, + prependXAMZMeta, + readableStream, + RETENTION_MODES, + RETENTION_VALIDITY_UNITS, + sanitizeETag, + toMd5, + uriEscape, + uriResourceEscape, +} from './helpers.ts' +import type { NotificationEvent } from './notification.ts' +import { NotificationConfig, NotificationPoller } from './notification.ts' +import { readAsBuffer } from './response.ts' +import * as transformers from './transformers.ts' +import type { + BucketItemCopy, + NoResultCallback, + RequestHeaders, + ResultCallback, + SourceObjectStats, + UploadedObjectInfo, +} from './type.ts' +import { TypedClient } from './typed-client.ts' +import type { ObjectLockConfig, S3ListObject } from './xml-parsers.ts' +import * as xmlParsers from './xml-parsers.ts' + +type PartConfig = { + bucketName: string + objectName: string + uploadID: string + partNumber: number + headers: RequestHeaders +} + +export class TypedClient2 
extends TypedClient { + // * `callback(err, {etag, lastModified})` _function_: non null `err` indicates error, `etag` _string_ and `listModifed` _Date_ are respectively the etag and the last modified date of the newly copied object + protected copyObjectV1( + bucketName: string, + objectName: string, + srcObject: string, + arg4: unknown, + arg5: unknown, + ): Promise | void { + const [[conditions = null], cb] = findCallback<[CopyConditions | null], ResultCallback>([ + arg4, + arg5, + ]) + + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isString(srcObject)) { + throw new TypeError('srcObject should be of type "string"') + } + if (srcObject === '') { + throw new errors.InvalidPrefixError(`Empty source prefix`) + } + + if (conditions !== null && !(conditions instanceof CopyConditions)) { + throw new TypeError('conditions should be of type "CopyConditions"') + } + + const headers: RequestHeaders = {} + headers['x-amz-copy-source'] = uriResourceEscape(srcObject) + + if (conditions !== null) { + if (conditions.modified !== '') { + headers['x-amz-copy-source-if-modified-since'] = conditions.modified + } + if (conditions.unmodified !== '') { + headers['x-amz-copy-source-if-unmodified-since'] = conditions.unmodified + } + if (conditions.matchETag !== '') { + headers['x-amz-copy-source-if-match'] = conditions.matchETag + } + if (conditions.matchETagExcept !== '') { + headers['x-amz-copy-source-if-none-match'] = conditions.matchETagExcept + } + } + + const method = 'PUT' + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, objectName, headers }) + const body = await readAsBuffer(res) + return xmlParsers.parseCopyObject(body.toString()) + }) + } + + /** + * Internal Method to perform copy of an object. 
+ * @param sourceConfig __object__ instance of CopySourceOptions @link ./helpers/CopySourceOptions + * @param destConfig __object__ instance of CopyDestinationOptions @link ./helpers/CopyDestinationOptions + * @param cb __function__ called with null if there is an error + * @returns Promise if no callack is passed. + */ + protected copyObjectV2( + sourceConfig: CopySourceOptions, + destConfig: CopyDestinationOptions, + cb?: ResultCallback, + ): Promise | void | false { + if (!(sourceConfig instanceof CopySourceOptions)) { + throw new errors.InvalidArgumentError('sourceConfig should of type CopySourceOptions ') + } + if (!(destConfig instanceof CopyDestinationOptions)) { + throw new errors.InvalidArgumentError('destConfig should of type CopyDestinationOptions ') + } + if (!destConfig.validate()) { + return false + } + if (!destConfig.validate()) { + return false + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + const headers = Object.assign({}, sourceConfig.getHeaders(), destConfig.getHeaders()) + + const bucketName = destConfig.Bucket + const objectName = destConfig.Object + + const method = 'PUT' + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, objectName, headers }) + const body = await readAsBuffer(res) + const data = xmlParsers.parseCopyObject(body.toString()) + + const resHeaders = res.headers + + return { + Bucket: destConfig.Bucket, + Key: destConfig.Object, + LastModified: data.lastModified, + lastModified: data.lastModified, + MetaData: extractMetadata(resHeaders), + VersionId: getVersionId(resHeaders), + SourceVersionId: getSourceVersionId(resHeaders), + Etag: sanitizeETag(resHeaders.etag), + etag: sanitizeETag(resHeaders.etag), + Size: parseInt(resHeaders['content-length']!), + } as BucketItemCopy + }) + } + + copyObject( + bucketName: string, + objectName: string, + sourceObject: string, + conditions: CopyConditions, + callback: ResultCallback, + ): 
void + copyObject( + bucketName: string, + objectName: string, + sourceObject: string, + conditions: CopyConditions, + ): Promise + + // Backward compatibility for Copy Object API. + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + copyObject(...allArgs): Promise | void | false { + if (allArgs[0] instanceof CopySourceOptions && allArgs[1] instanceof CopyDestinationOptions) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + return this.copyObjectV2(...allArgs) + } + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + return this.copyObjectV1(...allArgs) + } + + async uploadPartCopy(partConfig: PartConfig) { + const { bucketName, objectName, uploadID, partNumber, headers } = partConfig + + const method = 'PUT' + const query = `uploadId=${uploadID}&partNumber=${partNumber}` + const requestOptions: RequestOption = { method, bucketName, objectName: objectName, query, headers } + + const res = await this.makeRequestAsync(requestOptions) + + const body = await readAsBuffer(res) + + const data = xmlParsers.uploadPartParser(body.toString()) + + return { + etag: sanitizeETag(data.ETag), + key: objectName, + part: partNumber, + } + } + + // composeObject( + // destObjConfig: CopyDestinationOptions, + // sourceObjList: CopySourceOptions[], + // callback: ResultCallback, + // ): void + // composeObject(destObjConfig: CopyDestinationOptions, sourceObjList: CopySourceOptions[]): Promise + + composeObject( + destObjConfig: CopyDestinationOptions, + sourceObjList: CopySourceOptions[], + cb?: ResultCallback, + ): unknown { + const me = this // many async flows. so store the ref. 
+ const sourceFilesLength = sourceObjList.length + + if (!isArray(sourceObjList)) { + throw new errors.InvalidArgumentError('sourceConfig should an array of CopySourceOptions ') + } + if (!(destObjConfig instanceof CopyDestinationOptions)) { + throw new errors.InvalidArgumentError('destConfig should of type CopyDestinationOptions ') + } + + if (sourceFilesLength < 1 || sourceFilesLength > PART_CONSTRAINTS.MAX_PARTS_COUNT) { + throw new errors.InvalidArgumentError( + `"There must be as least one and up to ${PART_CONSTRAINTS.MAX_PARTS_COUNT} source objects.`, + ) + } + + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + for (let i = 0; i < sourceFilesLength; i++) { + // @ts-expect-error index check + if (!sourceObjList[i].validate()) { + return false + } + } + + if (!destObjConfig.validate()) { + return false + } + + const getStatOptions = (srcConfig: CopySourceOptions) => { + let statOpts = {} + if (!isEmpty(srcConfig.VersionID)) { + statOpts = { + versionId: srcConfig.VersionID, + } + } + return statOpts + } + const srcObjectSizes: number[] = [] + let totalSize = 0 + let totalParts = 0 + + const sourceObjStats = sourceObjList.map((srcItem) => + me.statObject(srcItem.Bucket, srcItem.Object, getStatOptions(srcItem)), + ) + + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + return asCallback(cb, async () => { + const srcObjectInfos = await Promise.all(sourceObjStats) + const validatedStats = srcObjectInfos.map((resItemStat, index) => { + const srcConfig = sourceObjList[index] + + let srcCopySize = resItemStat.size + // Check if a segment is specified, and if so, is the + // segment within object bounds? 
+ // @ts-expect-error index check + if (srcConfig.MatchRange) { + // Since range is specified, + // 0 <= src.srcStart <= src.srcEnd + // so only invalid case to check is: + // @ts-expect-error index check + const srcStart = srcConfig.Start + // @ts-expect-error index check + const srcEnd = srcConfig.End + if (srcEnd >= srcCopySize || srcStart < 0) { + throw new errors.InvalidArgumentError( + `CopySrcOptions ${index} has invalid segment-to-copy [${srcStart}, ${srcEnd}] (size is ${srcCopySize})`, + ) + } + srcCopySize = srcEnd - srcStart + 1 + } + + // Only the last source may be less than `absMinPartSize` + if (srcCopySize < PART_CONSTRAINTS.ABS_MIN_PART_SIZE && index < sourceFilesLength - 1) { + throw new errors.InvalidArgumentError( + `CopySrcOptions ${index} is too small (${srcCopySize}) and it is not the last part.`, + ) + } + + // Is data to copy too large? + totalSize += srcCopySize + if (totalSize > PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE) { + throw new errors.InvalidArgumentError(`Cannot compose an object of size ${totalSize} (> 5TiB)`) + } + + // record source size + srcObjectSizes[index] = srcCopySize + + // calculate parts needed for current source + totalParts += partsRequired(srcCopySize) + // Do we need more parts than we are allowed? + if (totalParts > PART_CONSTRAINTS.MAX_PARTS_COUNT) { + throw new errors.InvalidArgumentError( + `Your proposed compose object requires more than ${PART_CONSTRAINTS.MAX_PARTS_COUNT} parts`, + ) + } + + return resItemStat + }) + + if ((totalParts === 1 && totalSize <= PART_CONSTRAINTS.MAX_PART_SIZE) || totalSize === 0) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + return this.copyObject(sourceObjList[0], destObjConfig) // use copyObjectV2 + } + + // preserve etag to avoid modification of object while copying. 
+ for (let i = 0; i < sourceFilesLength; i++) { + // @ts-expect-error index check + sourceObjList[i].MatchETag = validatedStats[i].etag + } + + const newUploadHeaders = destObjConfig.getHeaders() + + const uploadId = await me.initiateNewMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, newUploadHeaders) + + const uploadList = validatedStats + .map((resItemStat, idx) => { + // @ts-expect-error index check + return calculateEvenSplits(srcObjectSizes[idx], sourceObjList[idx]) + }) + .flatMap((splitSize, splitIndex) => { + if (splitSize === null) { + throw new Error('BUG: splitSize === 0') + } + + const { startIndex: startIdx, endIndex: endIdx, objInfo: objConfig } = splitSize + + const partIndex = splitIndex + 1 // part index starts from 1. + const totalUploads = Array.from(startIdx) + + // @ts-expect-error index check + const headers = sourceObjList[splitIndex].getHeaders() + + return totalUploads.map((splitStart, upldCtrIdx) => { + const splitEnd = endIdx[upldCtrIdx] + + const sourceObj = `${objConfig.Bucket}/${objConfig.Object}` + headers['x-amz-copy-source'] = `${sourceObj}` + headers['x-amz-copy-source-range'] = `bytes=${splitStart}-${splitEnd}` + + return { + bucketName: destObjConfig.Bucket, + objectName: destObjConfig.Object, + uploadID: uploadId, + partNumber: partIndex, + headers: headers, + sourceObj: sourceObj, + } as PartConfig + }) + }) + + try { + const rr = await async.map(uploadList, async (o: PartConfig) => me.uploadPartCopy(o)) + const partsDone = rr.map((partCopy) => ({ etag: partCopy.etag, part: partCopy.part })) + return me.completeMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId, partsDone) + } catch (e) { + await this.abortMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId) + throw e + } + }) + } + + setObjectLockConfig( + bucketName: string, + lockConfigOpts: ObjectLockConfig = {}, + cb?: NoResultCallback, + ): void | Promise { + const retentionModes = [RETENTION_MODES.COMPLIANCE, 
RETENTION_MODES.GOVERNANCE] + const validUnits = [RETENTION_VALIDITY_UNITS.DAYS, RETENTION_VALIDITY_UNITS.YEARS] + + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + + if (lockConfigOpts.mode && !retentionModes.includes(lockConfigOpts.mode)) { + throw new TypeError(`lockConfigOpts.mode should be one of ${retentionModes}`) + } + if (lockConfigOpts.unit && !validUnits.includes(lockConfigOpts.unit)) { + throw new TypeError(`lockConfigOpts.unit should be one of ${validUnits}`) + } + if (lockConfigOpts.validity && !isNumber(lockConfigOpts.validity)) { + throw new TypeError(`lockConfigOpts.validity should be a number`) + } + + const method = 'PUT' + const query = 'object-lock' + + const config: { ObjectLockEnabled: string; Rule?: { DefaultRetention: Record } } = { + ObjectLockEnabled: 'Enabled', + } + const configKeys = Object.keys(lockConfigOpts) + // Check if keys are present and all keys are present. + if (configKeys.length > 0) { + if (_.difference(configKeys, ['unit', 'mode', 'validity']).length !== 0) { + throw new TypeError( + `lockConfigOpts.mode,lockConfigOpts.unit,lockConfigOpts.validity all the properties should be specified.`, + ) + } else { + config.Rule = { + DefaultRetention: {}, + } + if (lockConfigOpts.mode) { + config.Rule.DefaultRetention.Mode = lockConfigOpts.mode + } + if (lockConfigOpts.unit === RETENTION_VALIDITY_UNITS.DAYS) { + config.Rule.DefaultRetention.Days = lockConfigOpts.validity + } else if (lockConfigOpts.unit === RETENTION_VALIDITY_UNITS.YEARS) { + config.Rule.DefaultRetention.Years = lockConfigOpts.validity + } + } + } + + const builder = new xml2js.Builder({ + rootName: 'ObjectLockConfiguration', + renderOpts: { pretty: false }, + headless: true, + }) + const payload = builder.buildObject(config) + + const headers: RequestHeaders = {} + headers['Content-MD5'] = toMd5(payload) + + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit( + { + 
method, + bucketName, + query, + headers, + }, + payload, + ) + }) + } + + getObjectLockConfig( + bucketName: string, + cb?: ResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isFunction(cb)) { + throw new errors.InvalidArgumentError('callback should be of type "function"') + } + const method = 'GET' + const query = 'object-lock' + + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, query }) + const body = await readAsBuffer(res) + return xmlParsers.parseObjectLockConfig(body.toString()) + }) + } + + removeBucketEncryption(bucketName: string, cb: NoResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isFunction(cb)) { + throw new errors.InvalidArgumentError('callback should be of type "function"') + } + const method = 'DELETE' + const query = 'encryption' + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit({ method, bucketName, query }, '', [204]) + }) + } + + setBucketReplication( + bucketName: string, + replicationConfig: { + role?: string + rules?: unknown + } = {}, + cb?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isObject(replicationConfig)) { + throw new errors.InvalidArgumentError('replicationConfig should be of type "object"') + } else { + if (isEmpty(replicationConfig.role)) { + throw new errors.InvalidArgumentError('Role cannot be empty') + } else if (replicationConfig.role && !isString(replicationConfig.role)) { + throw new errors.InvalidArgumentError('Invalid value for role', replicationConfig.role) + } + if (isEmpty(replicationConfig.rules)) { + throw new errors.InvalidArgumentError('Minimum one replication rule must be 
specified') + } + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + const method = 'PUT' + const query = 'replication' + const headers: RequestHeaders = {} + + const replicationParamsConfig = { + ReplicationConfiguration: { + Role: replicationConfig.role, + Rule: replicationConfig.rules, + }, + } + + const builder = new xml2js.Builder({ renderOpts: { pretty: false }, headless: true }) + + const payload = builder.buildObject(replicationParamsConfig) + + headers['Content-MD5'] = toMd5(payload) + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit( + { + method, + bucketName, + query, + headers, + }, + payload, + ) + }) + } + + getBucketReplication(bucketName: string, cb?: ResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isFunction(cb)) { + throw new errors.InvalidArgumentError('callback should be of type "function"') + } + const method = 'GET' + const query = 'replication' + + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, query }) + const body = await readAsBuffer(res) + return xmlParsers.parseReplicationConfig(body.toString()) + }) + } + + removeBucketReplication(bucketName: string, cb?: NoResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + const method = 'DELETE' + const query = 'replication' + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit( + { + method, + bucketName, + query, + }, + '', + [200, 204], + ) + }) + } + + removeAllBucketNotification(bucketName: string, cb?: NoResultCallback) { + return this.setBucketNotification(bucketName, new NotificationConfig(), cb) + } + + // in the S3 provider + getBucketNotification(bucketName: string, cb?: ResultCallback): void | Promise { + if 
(!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + const method = 'GET' + const query = 'notification' + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, query }) + const body = await readAsBuffer(res) + return xmlParsers.parseBucketNotification(body.toString()) + }) + } + + // Listens for bucket notifications. Returns an EventEmitter. + listenBucketNotification(bucketName: string, prefix: string, suffix: string, events: NotificationEvent[]) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) + } + if (!isString(prefix)) { + throw new TypeError('prefix must be of type string') + } + if (!isString(suffix)) { + throw new TypeError('suffix must be of type string') + } + if (!isArray(events)) { + throw new TypeError('events must be of type Array') + } + const listener = new NotificationPoller(this, bucketName, prefix, suffix, events) + listener.start() + + return listener + } + + // Remove all the notification configurations in the S3 provider + setBucketNotification(bucketName: string, config: NotificationConfig, cb?: NoResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isObject(config)) { + throw new TypeError('notification config should be of type "Object"') + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + const method = 'PUT' + const query = 'notification' + const builder = new xml2js.Builder({ + rootName: 'NotificationConfiguration', + renderOpts: { pretty: false }, + headless: true, + }) + const payload = builder.buildObject(config) + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit({ method, 
bucketName, query }, payload) + }) + } + + // * `obj.lastModified` _Date_: modified time stamp + listObjectsV2(bucketName: string, prefix: string, recursive?: boolean, startAfter?: string) { + if (prefix === undefined) { + prefix = '' + } + if (recursive === undefined) { + recursive = false + } + if (startAfter === undefined) { + startAfter = '' + } + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidPrefix(prefix)) { + throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) + } + if (!isString(prefix)) { + throw new TypeError('prefix should be of type "string"') + } + if (!isBoolean(recursive)) { + throw new TypeError('recursive should be of type "boolean"') + } + if (!isString(startAfter)) { + throw new TypeError('startAfter should be of type "string"') + } + // if recursive is false set delimiter to '/' + const delimiter = recursive ? '' : '/' + let continuationToken = '' + let objects: S3ListObject[] = [] + let ended = false + const readStream = new stream.Readable({ objectMode: true }) + readStream._read = () => { + // push one object per _read() + if (objects.length) { + readStream.push(objects.shift()) + return + } + if (ended) { + return readStream.push(null) + } + // if there are no objects to push do query for the next batch of objects + this.listObjectsV2Query(bucketName, prefix, continuationToken, delimiter, 1000, startAfter!) 
+ .on('error', (e) => readStream.emit('error', e)) + .on('data', (result) => { + if (result.isTruncated) { + continuationToken = result.nextContinuationToken + } else { + ended = true + } + objects = result.objects + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + readStream._read() + }) + } + return readStream + } + + // List the objects in the bucket using S3 ListObjects V2 + // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `prefix` _string_: the prefix of the objects that should be listed (optional, default `''`) + // * `recursive` _bool_: `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. (optional, default `false`) + // * `startAfter` _string_: Specifies the key to start after when listing objects in a bucket. (optional, default `''`) + // + // __Return Value__ + // * `stream` _Stream_: stream emitting the objects in the bucket, the object is of the format: + // * `obj.name` _string_: name of the object + // * `obj.prefix` _string_: name of the object prefix + // * `obj.size` _number_: size of the object + // * `obj.etag` _string_: etag of the object + + // * `start-after` _string_: Specifies the key to start after when listing objects in a bucket. 
+ listObjectsV2Query( + bucketName: string, + prefix: string, + continuationToken: string, + delimiter: string, + maxKeys: number, + startAfter: string, + ) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isString(prefix)) { + throw new TypeError('prefix should be of type "string"') + } + if (!isString(continuationToken)) { + throw new TypeError('continuationToken should be of type "string"') + } + if (!isString(delimiter)) { + throw new TypeError('delimiter should be of type "string"') + } + if (!isNumber(maxKeys)) { + throw new TypeError('maxKeys should be of type "number"') + } + if (!isString(startAfter)) { + throw new TypeError('startAfter should be of type "string"') + } + const queries = [] + + // Call for listing objects v2 API + queries.push(`list-type=2`) + queries.push(`encoding-type=url`) + + // escape every value in query string, except maxKeys + queries.push(`prefix=${uriEscape(prefix)}`) + queries.push(`delimiter=${uriEscape(delimiter)}`) + + if (continuationToken) { + continuationToken = uriEscape(continuationToken) + queries.push(`continuation-token=${continuationToken}`) + } + // Set start-after + if (startAfter) { + startAfter = uriEscape(startAfter) + queries.push(`start-after=${startAfter}`) + } + // no need to escape maxKeys + if (maxKeys) { + if (maxKeys >= 1000) { + maxKeys = 1000 + } + queries.push(`max-keys=${maxKeys}`) + } + queries.sort() + let query = '' + if (queries.length > 0) { + query = `${queries.join('&')}` + } + const method = 'GET' + const transformer = transformers.getListObjectsV2Transformer() + this.makeRequestAsync({ method, bucketName, query }, '', [200], '', true).then( + (response) => { + pipesetup(response, transformer) + }, + (e) => { + return transformer.emit('error', e) + }, + ) + return transformer + } + + // Copy the object. 
+ // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `objectName` _string_: name of the object + // * `srcObject` _string_: path of the source object to be copied + // * `conditions` _CopyConditions_: copy conditions that needs to be satisfied (optional, default `null`) + + // * `versionId` _string_: versionId of the object + putObject( + bucketName: string, + objectName: string, + stream: string | Buffer | stream.Readable, + sizeArg?: number, + metaDataArg?: MetaData, + callbackArg?: ResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + let [[size, metaData = {}], callback] = findCallback< + [number | undefined, MetaData], + ResultCallback + >([sizeArg, metaDataArg, callbackArg]) + + // We'll need to shift arguments to the left because of metaData + // and size being optional. + if (isObject(size)) { + metaData = size + size = undefined + } + + // Ensures Metadata has appropriate prefix for A3 API + metaData = prependXAMZMeta(metaData) + if (typeof stream === 'string' || stream instanceof Buffer) { + // Adapts the non-stream interface into a stream. 
+ if (size !== undefined) { + if (size !== Buffer.from(stream).length) { + throw new errors.InvalidArgumentError( + `size input and object length mismatch, object has length ${stream.length} but input size is ${size}`, + ) + } + } + size = Buffer.from(stream).length + } else if (!isReadableStream(stream)) { + throw new TypeError('third argument should be of type "stream.Readable" or "Buffer" or "string"') + } + + if (!isOptionalFunction(callback)) { + throw new TypeError('callback should be of type "function"') + } + + if (isNumber(size) && size < 0) { + throw new errors.InvalidArgumentError(`size cannot be negative, given size: ${size}`) + } + + if (isNumber(size) && size > this.maxObjectSize) { + throw new TypeError(`size should not be more than ${this.maxObjectSize}`) + } + + const executor = async () => { + // Get the part size and forward that to the BlockStream. Default to the + // largest block size possible if necessary. + if (size === undefined) { + const statSize = await getContentLength(stream) + if (statSize !== null) { + size = statSize + } + } + + if (!isNumber(size)) { + // Backward compatibility + size = this.maxObjectSize + } + + const partSize = this.calculatePartSize(size) + + if (typeof stream === 'string' || Buffer.isBuffer(stream) || size <= this.partSize) { + const uploader = this.getUploader(bucketName, objectName, metaData, false) + const buf = isReadableStream(stream) ? await readAsBuffer(stream) : Buffer.from(stream) + const { md5sum, sha256sum } = transformers.hashBinary(buf, this.enableSHA256) + return uploader(buf, buf.length, sha256sum, md5sum) + } + + return uploadStream({ + client: this, + stream: isReadableStream(stream) ? 
stream : readableStream(stream), + partSize, + bucketName, + objectName, + metaData, + }) + } + + return asCallback(callback, executor()) + } +} + +async function getContentLength(s: stream.Readable | Buffer | string): Promise { + const length = (s as unknown as Record).length as number | undefined + if (isNumber(length)) { + return length + } + + // property of fs.ReadStream + const filePath = (s as unknown as Record).path as string | undefined + if (filePath) { + const stat = await fsp.lstat(filePath) + return stat.size + } + + // property of fs.ReadStream + const fd = (s as unknown as Record).fd as number | null | undefined + + if (fd) { + const stat = await fsp.fstat(fd) + return stat.size + } + + return null +} diff --git a/src/upload.ts b/src/upload.ts new file mode 100644 index 00000000..e69de29b diff --git a/src/xml-parsers.js b/src/xml-parsers.ts similarity index 59% rename from src/xml-parsers.js rename to src/xml-parsers.ts index 447ec898..c1ae330e 100644 --- a/src/xml-parsers.js +++ b/src/xml-parsers.ts @@ -14,52 +14,52 @@ * limitations under the License. 
*/ -import crc32 from 'buffer-crc32' +import * as newCrc32 from 'crc-32' import { XMLParser } from 'fast-xml-parser' -import _ from 'lodash' import * as errors from './errors.ts' +import type { MetaData, RETENTION_MODES } from './helpers.ts' import { isObject, parseXml, - readableStream, RETENTION_VALIDITY_UNITS, sanitizeETag, sanitizeObjectKey, SelectResults, toArray, -} from './helpers.js' +} from './helpers.ts' +import type { BucketItemCopy, BucketItemFromList, Retention, UploadID } from './type.ts' -// Parse XML and return information as Javascript types const fxp = new XMLParser() +// Parse XML and return information as Javascript types // parse error XML response -export function parseError(xml, headerInfo) { - var xmlErr = {} - var xmlObj = fxp.parse(xml) +export function parseError(xml: string, headerInfo: Record) { + let xmlErr = {} + const xmlObj = fxp.parse(xml) if (xmlObj.Error) { xmlErr = xmlObj.Error } - var e = new errors.S3Error() - _.each(xmlErr, (value, key) => { + const e = new errors.S3Error() as unknown as Record + Object.entries(xmlErr).forEach(([key, value]) => { e[key.toLowerCase()] = value }) - _.each(headerInfo, (value, key) => { + Object.entries(headerInfo).forEach(([key, value]) => { e[key] = value }) + return e } // parse XML response for copy object -export function parseCopyObject(xml) { - var result = { +export function parseCopyObject(xml: string): BucketItemCopy { + const result: { etag: string; lastModified?: Date } = { etag: '', - lastModified: '', } - var xmlobj = parseXml(xml) + let xmlobj = parseXml(xml) if (!xmlobj.CopyObjectResult) { throw new errors.InvalidXMLError('Missing tag: "CopyObjectResult"') } @@ -80,14 +80,23 @@ export function parseCopyObject(xml) { } // parse XML response for listing in-progress multipart uploads -export function parseListMultipart(xml) { - var result = { - uploads: [], - prefixes: [], +export function parseListMultipart(xml: string) { + const result = { + uploads: [] as { + key: string + 
uploadId: UploadID + initiator: unknown + owner: unknown + storageClass: unknown + initiated: unknown + }[], + prefixes: [] as { prefix: string }[], isTruncated: false, + nextKeyMarker: undefined, + nextUploadIdMarker: undefined, } - var xmlobj = parseXml(xml) + let xmlobj = parseXml(xml) if (!xmlobj.ListMultipartUploadsResult) { throw new errors.InvalidXMLError('Missing tag: "ListMultipartUploadsResult"') @@ -105,18 +114,19 @@ export function parseListMultipart(xml) { if (xmlobj.CommonPrefixes) { toArray(xmlobj.CommonPrefixes).forEach((prefix) => { - result.prefixes.push({ prefix: sanitizeObjectKey(toArray(prefix.Prefix)[0]) }) + // @ts-expect-error index check + result.prefixes.push({ prefix: sanitizeObjectKey(toArray(prefix.Prefix)[0]) }) }) } if (xmlobj.Upload) { toArray(xmlobj.Upload).forEach((upload) => { - var key = upload.Key - var uploadId = upload.UploadId - var initiator = { id: upload.Initiator.ID, displayName: upload.Initiator.DisplayName } - var owner = { id: upload.Owner.ID, displayName: upload.Owner.DisplayName } - var storageClass = upload.StorageClass - var initiated = new Date(upload.Initiated) + const key = upload.Key + const uploadId = upload.UploadId + const initiator = { id: upload.Initiator.ID, displayName: upload.Initiator.DisplayName } + const owner = { id: upload.Owner.ID, displayName: upload.Owner.DisplayName } + const storageClass = upload.StorageClass + const initiated = new Date(upload.Initiated) result.uploads.push({ key, uploadId, initiator, owner, storageClass, initiated }) }) } @@ -124,9 +134,9 @@ export function parseListMultipart(xml) { } // parse XML response to list all the owned buckets -export function parseListBucket(xml) { - var result = [] - var xmlobj = parseXml(xml) +export function parseListBucket(xml: string): BucketItemFromList[] { + const result: BucketItemFromList[] = [] + let xmlobj = parseXml(xml) if (!xmlobj.ListAllMyBucketsResult) { throw new errors.InvalidXMLError('Missing tag: "ListAllMyBucketsResult"') @@ 
-136,8 +146,8 @@ export function parseListBucket(xml) { if (xmlobj.Buckets) { if (xmlobj.Buckets.Bucket) { toArray(xmlobj.Buckets.Bucket).forEach((bucket) => { - var name = bucket.Name - var creationDate = new Date(bucket.CreationDate) + const name = bucket.Name + const creationDate = new Date(bucket.CreationDate) result.push({ name, creationDate }) }) } @@ -146,33 +156,31 @@ export function parseListBucket(xml) { } // parse XML response for bucket notification -export function parseBucketNotification(xml) { - var result = { - TopicConfiguration: [], - QueueConfiguration: [], - CloudFunctionConfiguration: [], +export function parseBucketNotification(xml: string): any { + const result = { + TopicConfiguration: [] as unknown[], + QueueConfiguration: [] as unknown[], + CloudFunctionConfiguration: [] as unknown[], } // Parse the events list - var genEvents = function (events) { - var result = [] + const genEvents = function (events: any) { + const result = [] if (events) { - toArray(events).forEach((s3event) => { - result.push(s3event) - }) + result.push(...toArray(events)) } return result } // Parse all filter rules - var genFilterRules = function (filters) { - var result = [] + const genFilterRules = function (filters: any) { + const result: { Name: string; Value: string }[] = [] if (filters) { filters = toArray(filters) if (filters[0].S3Key) { filters[0].S3Key = toArray(filters[0].S3Key) if (filters[0].S3Key[0].FilterRule) { toArray(filters[0].S3Key[0].FilterRule).forEach((rule) => { - var Name = toArray(rule.Name)[0] - var Value = toArray(rule.Value)[0] + const Name = toArray(rule.Name)[0] + const Value = toArray(rule.Value)[0] result.push({ Name, Value }) }) } @@ -181,36 +189,36 @@ export function parseBucketNotification(xml) { return result } - var xmlobj = parseXml(xml) + let xmlobj = parseXml(xml) xmlobj = xmlobj.NotificationConfiguration // Parse all topic configurations in the xml if (xmlobj.TopicConfiguration) { 
toArray(xmlobj.TopicConfiguration).forEach((config) => { - var Id = toArray(config.Id)[0] - var Topic = toArray(config.Topic)[0] - var Event = genEvents(config.Event) - var Filter = genFilterRules(config.Filter) + const Id = toArray(config.Id)[0] + const Topic = toArray(config.Topic)[0] + const Event = genEvents(config.Event) + const Filter = genFilterRules(config.Filter) result.TopicConfiguration.push({ Id, Topic, Event, Filter }) }) } // Parse all topic configurations in the xml if (xmlobj.QueueConfiguration) { toArray(xmlobj.QueueConfiguration).forEach((config) => { - var Id = toArray(config.Id)[0] - var Queue = toArray(config.Queue)[0] - var Event = genEvents(config.Event) - var Filter = genFilterRules(config.Filter) + const Id = toArray(config.Id)[0] + const Queue = toArray(config.Queue)[0] + const Event = genEvents(config.Event) + const Filter = genFilterRules(config.Filter) result.QueueConfiguration.push({ Id, Queue, Event, Filter }) }) } // Parse all QueueConfiguration arrays if (xmlobj.CloudFunctionConfiguration) { toArray(xmlobj.CloudFunctionConfiguration).forEach((config) => { - var Id = toArray(config.Id)[0] - var CloudFunction = toArray(config.CloudFunction)[0] - var Event = genEvents(config.Event) - var Filter = genFilterRules(config.Filter) + const Id = toArray(config.Id)[0] + const CloudFunction = toArray(config.CloudFunction)[0] + const Event = genEvents(config.Event) + const Filter = genFilterRules(config.Filter) result.CloudFunctionConfiguration.push({ Id, CloudFunction, Event, Filter }) }) } @@ -219,18 +227,24 @@ export function parseBucketNotification(xml) { } // parse XML response for bucket region -export function parseBucketRegion(xml) { +export function parseBucketRegion(xml: string) { // return region information return parseXml(xml).LocationConstraint } +export type Part = { + part: number + lastModified?: Date + etag: string +} + // parse XML response for list parts of an in progress multipart upload -export function parseListParts(xml) 
{ - var xmlobj = parseXml(xml) - var result = { +export function parseListParts(xml: string): { isTruncated: boolean; marker: number | undefined; parts: Part[] } { + let xmlobj = parseXml(xml) + const result: { isTruncated: boolean; marker: number | undefined; parts: Part[] } = { isTruncated: false, parts: [], - marker: undefined, + marker: undefined as number | undefined, } if (!xmlobj.ListPartsResult) { throw new errors.InvalidXMLError('Missing tag: "ListPartsResult"') @@ -240,13 +254,13 @@ export function parseListParts(xml) { result.isTruncated = xmlobj.IsTruncated } if (xmlobj.NextPartNumberMarker) { - result.marker = +toArray(xmlobj.NextPartNumberMarker)[0] + result.marker = toArray(xmlobj.NextPartNumberMarker)[0] } if (xmlobj.Part) { toArray(xmlobj.Part).forEach((p) => { - var part = +toArray(p.PartNumber)[0] - var lastModified = new Date(p.LastModified) - var etag = p.ETag.replace(/^"/g, '') + const part = +toArray(p.PartNumber)[0] + const lastModified = new Date(p.LastModified) + const etag = p.ETag.replace(/^"/g, '') .replace(/"$/g, '') .replace(/^"/g, '') .replace(/"$/g, '') @@ -259,8 +273,8 @@ export function parseListParts(xml) { } // parse XML response when a new multipart upload is initiated -export function parseInitiateMultipart(xml) { - var xmlobj = parseXml(xml) +export function parseInitiateMultipart(xml: string) { + let xmlobj = parseXml(xml) if (!xmlobj.InitiateMultipartUploadResult) { throw new errors.InvalidXMLError('Missing tag: "InitiateMultipartUploadResult"') @@ -273,14 +287,24 @@ export function parseInitiateMultipart(xml) { throw new errors.InvalidXMLError('Missing tag: "UploadId"') } +export type MultipartResult = + | { errCode: string; errMessage: string } + | { + errCode?: undefined // this help TS to narrow type + etag: string + key: string + bucket: string + location: string + } + // parse XML response when a multipart upload is completed -export function parseCompleteMultipart(xml) { - var xmlobj = 
parseXml(xml).CompleteMultipartUploadResult +export function parseCompleteMultipart(xml: string) { + const xmlobj = parseXml(xml).CompleteMultipartUploadResult if (xmlobj.Location) { - var location = toArray(xmlobj.Location)[0] - var bucket = toArray(xmlobj.Bucket)[0] - var key = xmlobj.Key - var etag = xmlobj.ETag.replace(/^"/g, '') + const location = toArray(xmlobj.Location)[0] + const bucket = toArray(xmlobj.Bucket)[0] + const key = xmlobj.Key + const etag = xmlobj.ETag.replace(/^"/g, '') .replace(/"$/g, '') .replace(/^"/g, '') .replace(/"$/g, '') @@ -291,20 +315,31 @@ export function parseCompleteMultipart(xml) { } // Complete Multipart can return XML Error after a 200 OK response if (xmlobj.Code && xmlobj.Message) { - var errCode = toArray(xmlobj.Code)[0] - var errMessage = toArray(xmlobj.Message)[0] + const errCode = toArray(xmlobj.Code)[0] + const errMessage = toArray(xmlobj.Message)[0] return { errCode, errMessage } } } -const formatObjInfo = (content, opts = {}) => { - let { Key, LastModified, ETag, Size, VersionId, IsLatest } = content +type ListedObject = { + Key: string + LastModified: string + ETag: string + Size: number + VersionId?: string + IsLatest?: boolean +} + +const formatObjInfo = (content: ListedObject, opts: { IsDeleteMarker?: boolean } = {}) => { + const { Key, LastModified, ETag, Size, VersionId, IsLatest } = content if (!isObject(opts)) { opts = {} } + // @ts-expect-error index check const name = sanitizeObjectKey(toArray(Key)[0]) + // @ts-expect-error index check const lastModified = new Date(toArray(LastModified)[0]) const etag = sanitizeETag(toArray(ETag)[0]) @@ -319,17 +354,52 @@ const formatObjInfo = (content, opts = {}) => { } } +export type S3ListObject = + | { prefix: string; size: number } + | { name: string; size: number } // sometime api return this, not sure if it's valid + | { + name: string + lastModified: Date + etag: string + size: number + isDeleteMarker?: boolean + isLatest?: boolean + } + +type ListObjectResponse = { + 
nextMarker?: string + versionIdMarker?: string + objects: S3ListObject[] + isTruncated: boolean + nextContinuationToken?: string +} + // parse XML response for list objects in a bucket -export function parseListObjects(xml) { - var result = { +export function parseListObjects(xml: string) { + const result: ListObjectResponse = { objects: [], isTruncated: false, } let isTruncated = false let nextMarker, nextVersionKeyMarker - const xmlobj = parseXml(xml) + const xmlobj = parseXml(xml) as { + ListBucketResult?: { + CommonPrefixes: { Prefix: string } + IsTruncated: boolean + NextMarker?: string + Contents: Array<{ Key: string; LastModified: string; ETag: string; Size: number }> + } + ListVersionsResult?: { + CommonPrefixes: unknown + NextKeyMarker?: string + NextVersionIdMarker?: string + Version: Array + DeleteMarker?: Array + IsTruncated: boolean + } + } - const parseCommonPrefixesEntity = (responseEntity) => { + const parseCommonPrefixesEntity = (responseEntity: any) => { if (responseEntity) { toArray(responseEntity).forEach((commonPrefix) => { result.objects.push({ prefix: sanitizeObjectKey(toArray(commonPrefix.Prefix)[0]), size: 0 }) @@ -337,7 +407,9 @@ export function parseListObjects(xml) { } } + // https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjects.html const listBucketResult = xmlobj.ListBucketResult + // https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjectVersions.html const listVersionsResult = xmlobj.ListVersionsResult if (listBucketResult) { @@ -346,9 +418,9 @@ export function parseListObjects(xml) { } if (listBucketResult.Contents) { toArray(listBucketResult.Contents).forEach((content) => { - const name = sanitizeObjectKey(toArray(content.Key)[0]) - const lastModified = new Date(toArray(content.LastModified)[0]) - const etag = sanitizeETag(toArray(content.ETag)[0]) + const name = sanitizeObjectKey(content.Key) + const lastModified = new Date(content.LastModified) + const etag = sanitizeETag(content.ETag) const size = content.Size 
result.objects.push({ name, lastModified, etag, size }) }) @@ -393,12 +465,25 @@ export function parseListObjects(xml) { } // parse XML response for list objects v2 in a bucket -export function parseListObjectsV2(xml) { - var result = { +export function parseListObjectsV2(xml: string) { + const result: { + objects: ( + | { prefix: string; size: number } + | { + name: string + lastModified: Date + etag: string + size: number + } + )[] + isTruncated: boolean + nextContinuationToken?: string + } = { objects: [], isTruncated: false, } - var xmlobj = parseXml(xml) + + let xmlobj = parseXml(xml) if (!xmlobj.ListBucketResult) { throw new errors.InvalidXMLError('Missing tag: "ListBucketResult"') } @@ -411,10 +496,10 @@ export function parseListObjectsV2(xml) { } if (xmlobj.Contents) { toArray(xmlobj.Contents).forEach((content) => { - var name = sanitizeObjectKey(toArray(content.Key)[0]) - var lastModified = new Date(content.LastModified) - var etag = sanitizeETag(content.ETag) - var size = content.Size + const name = sanitizeObjectKey(toArray(content.Key)[0]) + const lastModified = new Date(content.LastModified) + const etag = sanitizeETag(content.ETag) + const size = content.Size result.objects.push({ name, lastModified, etag, size }) }) } @@ -426,13 +511,26 @@ export function parseListObjectsV2(xml) { return result } -// parse XML response for list objects v2 with metadata in a bucket -export function parseListObjectsV2WithMetadata(xml) { - var result = { +export function parseListObjectsV2WithMetadata(xml: string) { + const result: { + objects: ( + | { prefix: string; size: number } + | { + name: string + lastModified: Date + etag: string + size: number + metadata: MetaData | null + } + )[] + isTruncated: boolean + nextContinuationToken?: string + } = { objects: [], isTruncated: false, } - var xmlobj = parseXml(xml) + + let xmlobj = parseXml(xml) if (!xmlobj.ListBucketResult) { throw new errors.InvalidXMLError('Missing tag: "ListBucketResult"') } @@ -446,11 +544,11 @@ 
export function parseListObjectsV2WithMetadata(xml) { if (xmlobj.Contents) { toArray(xmlobj.Contents).forEach((content) => { - var name = sanitizeObjectKey(content.Key) - var lastModified = new Date(content.LastModified) - var etag = sanitizeETag(content.ETag) - var size = content.Size - var metadata + const name = sanitizeObjectKey(content.Key) + const lastModified = new Date(content.LastModified) + const etag = sanitizeETag(content.ETag) + const size = content.Size + let metadata if (content.UserMetadata != null) { metadata = toArray(content.UserMetadata)[0] } else { @@ -468,12 +566,12 @@ export function parseListObjectsV2WithMetadata(xml) { return result } -export function parseBucketVersioningConfig(xml) { - var xmlObj = parseXml(xml) +export function parseBucketVersioningConfig(xml: string) { + const xmlObj = parseXml(xml) return xmlObj.VersioningConfiguration } -export function parseTagging(xml) { +export function parseTagging(xml: string) { const xmlObj = parseXml(xml) let result = [] if (xmlObj.Tagging && xmlObj.Tagging.TagSet && xmlObj.Tagging.TagSet.Tag) { @@ -488,14 +586,21 @@ export function parseTagging(xml) { return result } -export function parseLifecycleConfig(xml) { +export function parseLifecycleConfig(xml: string) { const xmlObj = parseXml(xml) return xmlObj.LifecycleConfiguration } -export function parseObjectLockConfig(xml) { +export type ObjectLockConfig = { + mode?: keyof typeof RETENTION_MODES + objectLockEnabled?: 'Enabled' + unit?: 'Years' | 'Days' + validity?: number +} + +export function parseObjectLockConfig(xml: string): ObjectLockConfig | undefined { const xmlObj = parseXml(xml) - let lockConfigResult = {} + let lockConfigResult: ObjectLockConfig = {} if (xmlObj.ObjectLockConfiguration) { lockConfigResult = { objectLockEnabled: xmlObj.ObjectLockConfiguration.ObjectLockEnabled, @@ -523,43 +628,45 @@ export function parseObjectLockConfig(xml) { } } -export function parseObjectRetentionConfig(xml) { +export function 
parseObjectRetentionConfig(xml: string) { const xmlObj = parseXml(xml) const retentionConfig = xmlObj.Retention return { mode: retentionConfig.Mode, retainUntilDate: retentionConfig.RetainUntilDate, - } + } as Retention } -export function parseBucketEncryptionConfig(xml) { - let encConfig = parseXml(xml) - return encConfig +export function parseBucketEncryptionConfig(xml: string) { + return parseXml(xml) } -export function parseReplicationConfig(xml) { + +export function parseReplicationConfig(xml: string) { const xmlObj = parseXml(xml) + const replicationConfig = { ReplicationConfiguration: { role: xmlObj.ReplicationConfiguration.Role, rules: toArray(xmlObj.ReplicationConfiguration.Rule), }, } + return replicationConfig } -export function parseObjectLegalHoldConfig(xml) { +export function parseObjectLegalHoldConfig(xml: string) { const xmlObj = parseXml(xml) return xmlObj.LegalHold } -export function uploadPartParser(xml) { +export function uploadPartParser(xml: string) { const xmlObj = parseXml(xml) const respEl = xmlObj.CopyPartResult return respEl } -export function removeObjectsParser(xml) { +export function removeObjectsParser(xml: string) { const xmlObj = parseXml(xml) if (xmlObj.DeleteResult && xmlObj.DeleteResult.Error) { // return errors as array always. 
as the response is object in case of single object passed in removeObjects @@ -568,39 +675,56 @@ export function removeObjectsParser(xml) { return [] } -export function parseSelectObjectContentResponse(res) { +class ReadableBuffer { + private buf: Buffer + + public readLoc: number + + constructor(buf: Buffer) { + this.buf = buf + this.readLoc = 0 + } + + read(size: number): Buffer { + const sub = this.buf.subarray(this.readLoc, this.readLoc + size) + this.readLoc += size + return sub + } + + notEnd(): boolean { + return this.readLoc < this.buf.length + } +} + +export function parseSelectObjectContentResponse(res: Buffer): SelectResults { // extractHeaderType extracts the first half of the header message, the header type. - function extractHeaderType(stream) { - const headerNameLen = Buffer.from(stream.read(1)).readUInt8() - const headerNameWithSeparator = Buffer.from(stream.read(headerNameLen)).toString() - const splitBySeparator = (headerNameWithSeparator || '').split(':') - const headerName = splitBySeparator.length >= 1 ? splitBySeparator[1] : '' - return headerName + function extractHeaderType(stream: ReadableBuffer): string { + const headerNameLen = stream.read(1).readUInt8() + const headerNameWithSeparator = stream.read(headerNameLen).toString() + + const [_, name] = headerNameWithSeparator.split(':') + return name || '' } - function extractHeaderValue(stream) { - const bodyLen = Buffer.from(stream.read(2)).readUInt16BE() - const bodyName = Buffer.from(stream.read(bodyLen)).toString() - return bodyName + function extractHeaderValue(stream: ReadableBuffer) { + const bodyLen = stream.read(2).readUInt16BE() + return stream.read(bodyLen).toString() } const selectResults = new SelectResults({}) // will be returned - const responseStream = readableStream(res) // convert byte array to a readable responseStream - while (responseStream._readableState.length) { - // Top level responseStream read tracker. 
- let msgCrcAccumulator // accumulate from start of the message till the message crc start. - - const totalByteLengthBuffer = Buffer.from(responseStream.read(4)) - msgCrcAccumulator = crc32(totalByteLengthBuffer) + const responseStream = new ReadableBuffer(res) // convert byte array to a readable responseStream + while (responseStream.notEnd()) { + const totalByteLengthBuffer = responseStream.read(4) + let msgCrcAccumulator = newCrc32.buf(totalByteLengthBuffer) - const headerBytesBuffer = Buffer.from(responseStream.read(4)) - msgCrcAccumulator = crc32(headerBytesBuffer, msgCrcAccumulator) + const headerBytesBuffer = responseStream.read(4) + msgCrcAccumulator = newCrc32.buf(headerBytesBuffer, msgCrcAccumulator) - const calculatedPreludeCrc = msgCrcAccumulator.readInt32BE() // use it to check if any CRC mismatch in header itself. + const calculatedPreludeCrc = msgCrcAccumulator // use it to check if any CRC mismatch in header itself. - const preludeCrcBuffer = Buffer.from(responseStream.read(4)) // read 4 bytes i.e 4+4 =8 + 4 = 12 ( prelude + prelude crc) - msgCrcAccumulator = crc32(preludeCrcBuffer, msgCrcAccumulator) + const preludeCrcBuffer = responseStream.read(4) // read 4 bytes i.e 4+4 =8 + 4 = 12 ( prelude + prelude crc) + msgCrcAccumulator = newCrc32.buf(preludeCrcBuffer, msgCrcAccumulator) const totalMsgLength = totalByteLengthBuffer.readInt32BE() const headerLength = headerBytesBuffer.readInt32BE() @@ -613,40 +737,41 @@ export function parseSelectObjectContentResponse(res) { ) } - const headers = {} + const headers: Record = {} + if (headerLength > 0) { - const headerBytes = Buffer.from(responseStream.read(headerLength)) - msgCrcAccumulator = crc32(headerBytes, msgCrcAccumulator) - const headerReaderStream = readableStream(headerBytes) - while (headerReaderStream._readableState.length) { - let headerTypeName = extractHeaderType(headerReaderStream) + const headerBytes = responseStream.read(headerLength) + msgCrcAccumulator = newCrc32.buf(headerBytes, 
msgCrcAccumulator) + const headerReaderStream = new ReadableBuffer(headerBytes) + while (headerReaderStream.notEnd()) { + const headerTypeName = extractHeaderType(headerReaderStream) headerReaderStream.read(1) // just read and ignore it. headers[headerTypeName] = extractHeaderValue(headerReaderStream) } } - let payloadStream + let payloadStream: ReadableBuffer const payLoadLength = totalMsgLength - headerLength - 16 if (payLoadLength > 0) { - const payLoadBuffer = Buffer.from(responseStream.read(payLoadLength)) - msgCrcAccumulator = crc32(payLoadBuffer, msgCrcAccumulator) + const payLoadBuffer = responseStream.read(payLoadLength) + msgCrcAccumulator = newCrc32.buf(payLoadBuffer, msgCrcAccumulator) // read the checksum early and detect any mismatch so we can avoid unnecessary further processing. - const messageCrcByteValue = Buffer.from(responseStream.read(4)).readInt32BE() - const calculatedCrc = msgCrcAccumulator.readInt32BE() + const messageCrcByteValue = responseStream.read(4).readInt32BE() + const calculatedCrc = msgCrcAccumulator // Handle message CRC Error if (messageCrcByteValue !== calculatedCrc) { throw new Error( `Message Checksum Mismatch, Message CRC of ${messageCrcByteValue} does not equal expected CRC of ${calculatedCrc}`, ) } - payloadStream = readableStream(payLoadBuffer) + payloadStream = new ReadableBuffer(payLoadBuffer) } const messageType = headers['message-type'] switch (messageType) { case 'error': { - const errorMessage = headers['error-code'] + ':"' + headers['error-message'] + '"' + const errorMessage = `${headers['error-code']}:"${headers['error-message']}"` throw new Error(errorMessage) } case 'event': { @@ -660,6 +785,8 @@ export function parseSelectObjectContentResponse(res) { } case 'Records': { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore const readData = payloadStream.read(payLoadLength) selectResults.setRecords(readData) break @@ -669,6 +796,8 @@ export function 
parseSelectObjectContentResponse(res) { { switch (contentType) { case 'text/xml': { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore const progressData = payloadStream.read(payLoadLength) selectResults.setProgress(progressData.toString()) break @@ -684,6 +813,8 @@ export function parseSelectObjectContentResponse(res) { { switch (contentType) { case 'text/xml': { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore const statsData = payloadStream.read(payLoadLength) selectResults.setStats(statsData.toString()) break @@ -706,4 +837,6 @@ export function parseSelectObjectContentResponse(res) { } // Event End } // messageType End } // Top Level Stream End + + throw new Error('unexpected end of stream') } diff --git a/tests/functional/functional-tests.js b/tests/functional/functional-tests.js index d0ab714b..e468cb11 100644 --- a/tests/functional/functional-tests.js +++ b/tests/functional/functional-tests.js @@ -23,30 +23,23 @@ import * as stream from 'node:stream' import * as url from 'node:url' import async from 'async' -import chai from 'chai' +import { assert } from 'chai' import _ from 'lodash' import { step } from 'mocha-steps' import splitFile from 'split-file' import superagent from 'superagent' import * as uuid from 'uuid' -import { AssumeRoleProvider } from '../../src/AssumeRoleProvider.js' -import { - CopyDestinationOptions, - CopySourceOptions, - DEFAULT_REGION, - getVersionId, - isArray, - removeDirAndFiles, -} from '../../src/helpers.js' -import * as minio from '../../src/minio.js' - -const assert = chai.assert +import { AssumeRoleProvider } from '../../src/AssumeRoleProvider.ts' +import { CopyDestinationOptions, CopySourceOptions, DEFAULT_REGION, getVersionId, isArray } from '../../src/helpers.ts' +import { removeDirAndFiles } from '../../src/helpers.ts' +import * as minio from '../../src/minio.ts' +import { Client } from '../../src/minio.ts' const isWindowsPlatform = process.platform === 'win32' 
describe('functional tests', function () { - this.timeout(30 * 60 * 1000) + this.timeout(10 * 60 * 1000) var clientConfigParams = {} var region_conf_env = process.env['MINIO_REGION'] @@ -75,7 +68,7 @@ describe('functional tests', function () { console.error(`Error: SECRET_KEY Environment variable is not set`) process.exit(1) } - clientConfigParams.useSSL = enable_https_env == '1' + clientConfigParams.useSSL = enable_https_env === '1' } else { // If credentials aren't given, default to play.min.io. clientConfigParams.endPoint = 'play.min.io' @@ -95,10 +88,10 @@ describe('functional tests', function () { // a directory with files to read from, i.e. /mint/data. var dataDir = process.env['MINT_DATA_DIR'] - var client = new minio.Client(clientConfigParams) + var client = new Client(clientConfigParams) var usEastConfig = clientConfigParams usEastConfig.region = server_region - var clientUsEastRegion = new minio.Client(usEastConfig) + var clientUsEastRegion = new Client(usEastConfig) var traceStream // FUNCTIONAL_TEST_TRACE env variable contains the path to which trace @@ -109,7 +102,9 @@ describe('functional tests', function () { if (trace_func_test_file_path === 'process.stdout') { traceStream = process.stdout } else { - traceStream = fs.createWriteStream(trace_func_test_file_path, { flags: 'a' }) + traceStream = fs.createWriteStream(trace_func_test_file_path, { + flags: 'a', + }) } traceStream.write('====================================\n') client.traceOn(traceStream) @@ -140,7 +135,9 @@ describe('functional tests', function () { var _5mbmd5 = crypto.createHash('md5').update(_5mb).digest('hex') // create new http agent to check requests release sockets - var httpAgent = (clientConfigParams.useSSL ? https : http).Agent({ keepAlive: true }) + var httpAgent = (clientConfigParams.useSSL ? 
https : http).Agent({ + keepAlive: true, + }) client.setRequestOptions({ agent: httpAgent }) var metaData = { 'Content-Type': 'text/html', @@ -338,7 +335,7 @@ describe('functional tests', function () { fs.writeFileSync(tmpFileUpload, _100kb) client.fPutObject(bucketName, _100kbObjectName, tmpFileUpload, done) }, - ) + ).timeout(5000) step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => { client.statObject(bucketName, _100kbObjectName, (e, stat) => { @@ -352,7 +349,7 @@ describe('functional tests', function () { } done() }) - }) + }).timeout(5000) var tmpFileUploadWithExt = `${tmpDir}/${_100kbObjectName}.txt` step( @@ -361,7 +358,7 @@ describe('functional tests', function () { fs.writeFileSync(tmpFileUploadWithExt, _100kb) client.fPutObject(bucketName, _100kbObjectName, tmpFileUploadWithExt, metaData, done) }, - ) + ).timeout(5000) step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => { client.statObject(bucketName, _100kbObjectName, (e, stat) => { @@ -377,7 +374,7 @@ describe('functional tests', function () { } done() }) - }) + }).timeout(5000) step( `fPutObject(bucketName, objectName, filePath, metaData, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}, filePath: ${tmpFileUploadWithExt}_`, @@ -385,7 +382,7 @@ describe('functional tests', function () { fs.writeFileSync(tmpFileUploadWithExt, _100kb) client.fPutObject(bucketName, _100kbObjectName, tmpFileUploadWithExt, done) }, - ) + ).timeout(5000) step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => { client.statObject(bucketName, _100kbObjectName, (e, stat) => { @@ -399,7 +396,7 @@ describe('functional tests', function () { } done() }) - }) + }).timeout(5000) step( `putObject(bucketName, objectName, stream, size, metaData, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}, stream:100kb, 
size:${_100kb.length}, metaData:${metaData}_`, @@ -407,7 +404,7 @@ describe('functional tests', function () { var stream = readableStream(_100kb) client.putObject(bucketName, _100kbObjectName, stream, _100kb.length, metaData, done) }, - ) + ).timeout(5000) step( `putObject(bucketName, objectName, stream, size, metaData, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}, stream:100kb, size:${_100kb.length}_`, @@ -415,7 +412,7 @@ describe('functional tests', function () { var stream = readableStream(_100kb) client.putObject(bucketName, _100kbObjectName, stream, _100kb.length, done) }, - ) + ).timeout(5000) step( `getObject(bucketName, objectName, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, @@ -435,14 +432,14 @@ describe('functional tests', function () { }) }) }, - ) + ).timeout(5000) step( `putObject(bucketName, objectName, stream, callback)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, stream:100kb_`, (done) => { - client.putObject(bucketName, _100kbObjectBufferName, _100kb, '', done) + client.putObject(bucketName, _100kbObjectBufferName, _100kb, done) }, - ) + ).timeout(5000) step( `getObject(bucketName, objectName, callback)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}_`, @@ -462,7 +459,7 @@ describe('functional tests', function () { }) }) }, - ) + ).timeout(5000) step( `putObject(bucketName, objectName, stream, metaData)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, stream:100kb_, metaData:{}`, @@ -472,7 +469,7 @@ describe('functional tests', function () { .then(() => done()) .catch(done) }, - ) + ).timeout(5000) step( `getPartialObject(bucketName, objectName, offset, length, cb)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, offset:0, length=1024_`, @@ -485,7 +482,7 @@ describe('functional tests', function () { }) .catch(done) }, - ) + ).timeout(5000) step( `getPartialObject(bucketName, objectName, offset, length, cb)_bucketName:${bucketName}, 
objectName:${_100kbObjectBufferName}, offset:1024, length=1024_`, @@ -505,7 +502,7 @@ describe('functional tests', function () { }) .catch(done) }, - ) + ).timeout(5000) step( `getPartialObject(bucketName, objectName, offset, length, cb)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, offset:1024`, @@ -524,7 +521,7 @@ describe('functional tests', function () { }) .catch(done) }, - ) + ).timeout(5000) step( `getObject(bucketName, objectName)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}_`, @@ -537,7 +534,7 @@ describe('functional tests', function () { }) .catch(done) }, - ) + ).timeout(5000) step( `putObject(bucketName, objectName, stream, metadata, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, @@ -552,7 +549,7 @@ describe('functional tests', function () { }, 100) }) }, - ) + ).timeout(5000) step(`getObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, (done) => { var hash = crypto.createHash('md5') @@ -836,7 +833,7 @@ describe('functional tests', function () { step( `initiateNewMultipartUpload(bucketName, objectName, metaData, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}, metaData:${metaData}`, (done) => { - client.initiateNewMultipartUpload(bucketName, _65mbObjectName, metaData, done) + client.initiateNewMultipartUpload(bucketName, _65mbObjectName, metaData).finally(done) }, ) step( @@ -946,15 +943,12 @@ describe('functional tests', function () { .then(() => done()) .catch(done) }, - ) + ).timeout(5000) step( `fPutObject(bucketName, objectName, filePath, metaData)_bucketName:${bucketName}, objectName:${_65mbObjectName}, filePath:${tmpFileUpload}_`, - (done) => { - client - .fPutObject(bucketName, _65mbObjectName, tmpFileUpload) - .then(() => done()) - .catch(done) + async () => { + await client.fPutObject(bucketName, _65mbObjectName, tmpFileUpload) }, ) @@ -966,7 +960,7 @@ describe('functional tests', function () { .then(() => done()) .catch(done) }, - ) + 
).timeout(5000) step( `removeObject(bucketName, objectName, filePath, callback)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, @@ -975,7 +969,7 @@ describe('functional tests', function () { fs.unlinkSync(tmpFileDownload) client.removeObject(bucketName, _65mbObjectName, done) }, - ) + ).timeout(5000) }) describe('fGetObject-resume', () => { var localFile = `${tmpDir}/${_5mbObjectName}` @@ -1915,12 +1909,12 @@ describe('functional tests', function () { poller.removeAllListeners('notification') // clean up object now client.removeObject(bucketName, objectName, done) - }, 11 * 1000) + }, 10 * 1000) }) }, ) - }) - }) + }).timeout(120 * 1000) + }).timeout(120 * 1000) describe('Bucket Versioning API', () => { // Isolate the bucket/object for easy debugging and tracking. @@ -1952,7 +1946,7 @@ describe('functional tests', function () { }) }) - step('Suspend versioning on a bucket', (done) => { + step('Suspend versioning on a bucket', (done) => { client.setBucketVersioning(versionedBucketName, { Status: 'Suspended' }, (err) => { if (err && err.code === 'NotImplemented') { return done() @@ -2228,91 +2222,102 @@ describe('functional tests', function () { step( `putObject(bucketName, objectName, stream, size, metaData, callback)_bucketName:${versionedBucketName}, stream:1b, size:1_Create ${listObjectsNum} objects`, (done) => { - if (isVersioningSupported) { - let count = 1 - objVersionIdCounter.forEach(() => { - client.putObject( - versionedBucketName, - objNameWithPrefix, - readableStream(_1byte), - _1byte.length, - {}, - (e, data) => { - objArray.push(data) - if (count === objVersionIdCounter.length) { - done() - } - count += 1 - }, - ) - }) - } else { + if (!isVersioningSupported) { done() + return } + + let count = 1 + objVersionIdCounter.forEach(() => { + client.putObject( + versionedBucketName, + objNameWithPrefix, + readableStream(_1byte), + _1byte.length, + {}, + (e, data) => { + if (e) { + done(e) + } + objArray.push(data) + if (count === 
objVersionIdCounter.length) { + done() + } + count += 1 + }, + ) + }) }, ) step( `listObjects(bucketName, prefix, recursive)_bucketName:${versionedBucketName}, prefix: '', recursive:true_`, (done) => { - if (isVersioningSupported) { - client - .listObjects(versionedBucketName, '', true, { IncludeVersion: true }) - .on('error', done) - .on('end', () => { - if (_.isEqual(objArray.length, listPrefixArray.length)) { - return done() - } - return done(new Error(`listObjects lists ${listPrefixArray.length} objects, expected ${listObjectsNum}`)) - }) - .on('data', (data) => { - listPrefixArray.push(data) - }) - } else { + if (!isVersioningSupported) { done() + return } + + client + .listObjects(versionedBucketName, '', true, { + IncludeVersion: true, + }) + .on('error', done) + .on('end', () => { + if (_.isEqual(objArray.length, listPrefixArray.length)) { + return done() + } + return done(new Error(`listObjects lists ${listPrefixArray.length} objects, expected ${listObjectsNum}`)) + }) + .on('data', (data) => { + listPrefixArray.push(data) + }) }, ) step( `listObjects(bucketName, prefix, recursive)_bucketName:${versionedBucketName}, prefix: ${prefixName}, recursive:true_`, (done) => { - if (isVersioningSupported) { - listPrefixArray = [] - client - .listObjects(versionedBucketName, prefixName, true, { IncludeVersion: true }) - .on('error', done) - .on('end', () => { - if (_.isEqual(objArray.length, listPrefixArray.length)) { - return done() - } - return done(new Error(`listObjects lists ${listPrefixArray.length} objects, expected ${listObjectsNum}`)) - }) - .on('data', (data) => { - listPrefixArray.push(data) - }) - } else { + if (!isVersioningSupported) { done() + return } + + listPrefixArray = [] + client + .listObjects(versionedBucketName, prefixName, true, { + IncludeVersion: true, + }) + .on('error', done) + .on('end', () => { + if (_.isEqual(objArray.length, listPrefixArray.length)) { + return done() + } + return done(new Error(`listObjects lists 
${listPrefixArray.length} objects, expected ${listObjectsNum}`)) + }) + .on('data', (data) => { + listPrefixArray.push(data) + }) }, ) step( `removeObject(bucketName, objectName, removeOpts)_bucketName:${versionedBucketName}_Remove ${listObjectsNum} objects`, (done) => { - if (isVersioningSupported) { - let count = 1 - listPrefixArray.forEach((item) => { - client.removeObject(versionedBucketName, item.name, { versionId: item.versionId }, () => { - if (count === listPrefixArray.length) { - done() - } - count += 1 - }) - }) - } else { + if (!isVersioningSupported) { done() + return } + + let count = 1 + listPrefixArray.forEach((item) => { + client.removeObject(versionedBucketName, item.name, { versionId: item.versionId }, () => { + if (count === listPrefixArray.length) { + done() + } + count += 1 + }) + }) }, ) }) @@ -2381,7 +2386,9 @@ describe('functional tests', function () { (done) => { if (isVersioningSupported) { client - .listObjects(versionedBucketName, '', true, { IncludeVersion: true }) + .listObjects(versionedBucketName, '', true, { + IncludeVersion: true, + }) .on('error', done) .on('end', () => { if (_.isEqual(2, objVersionList.length)) { @@ -3169,7 +3176,10 @@ describe('functional tests', function () { client.removeObject( objRetentionBucket, retentionObjName, - { versionId: versionId, governanceBypass: true }, + { + versionId: versionId, + governanceBypass: true, + }, () => { done() }, @@ -3412,7 +3422,10 @@ describe('functional tests', function () { client.setObjectLegalHold( objLegalHoldBucketName, objLegalHoldObjName, - { status: 'ON', versionId: versionId }, + { + status: 'ON', + versionId: versionId, + }, () => { done() }, @@ -3443,7 +3456,10 @@ describe('functional tests', function () { client.setObjectLegalHold( objLegalHoldBucketName, objLegalHoldObjName, - { status: 'OFF', versionId: versionId }, + { + status: 'OFF', + versionId: versionId, + }, () => { done() }, @@ -3474,7 +3490,10 @@ describe('functional tests', function () { 
client.removeObject( objLegalHoldBucketName, objLegalHoldObjName, - { versionId: versionId, governanceBypass: true }, + { + versionId: versionId, + governanceBypass: true, + }, () => { done() }, @@ -3788,9 +3807,11 @@ describe('functional tests', function () { secretKey: client.secretKey, }) - const aRoleConf = Object.assign({}, clientConfigParams, { credentialsProvider: assumeRoleProvider }) + const aRoleConf = Object.assign({}, clientConfigParams, { + credentialsProvider: assumeRoleProvider, + }) - const assumeRoleClient = new minio.Client(aRoleConf) + const assumeRoleClient = new Client(aRoleConf) assumeRoleClient.region = server_region describe('Put an Object', function () { @@ -3950,7 +3971,9 @@ describe('functional tests', function () { (done) => { if (isVersioningSupported) { client - .removeObject(bucketToTestMultipart, _100kbObjectName, { versionId: versionedObjectRes.versionId }) + .removeObject(bucketToTestMultipart, _100kbObjectName, { + versionId: versionedObjectRes.versionId, + }) .then(() => done()) .catch(done) } else { @@ -3992,7 +4015,9 @@ describe('functional tests', function () { (done) => { if (isVersioningSupported) { client - .removeObject(bucketToTestMultipart, _65mbObjectName, { versionId: versionedMultiPartObjectRes.versionId }) + .removeObject(bucketToTestMultipart, _65mbObjectName, { + versionId: versionedMultiPartObjectRes.versionId, + }) .then(() => done()) .catch(done) } else { @@ -4277,7 +4302,8 @@ describe('functional tests', function () { }, ) }) - describe('Test listIncompleteUploads (Multipart listing) with special characters', () => { + describe('Test listIncompleteUploads (Multipart listing) with special characters', function () { + this.timeout(30 * 1000) const specialCharPrefix = 'SpecialMenùäöüexPrefix/' const objectNameSpecialChars = 'äöüex.pdf' const spObjWithPrefix = `${specialCharPrefix}${objectNameSpecialChars}` @@ -4289,7 +4315,7 @@ describe('functional tests', function () { step( 
`initiateNewMultipartUpload(bucketName, objectName, metaData, cb)_bucketName:${spBucketName}, objectName:${spObjWithPrefix}, metaData:${metaData}`, (done) => { - client.initiateNewMultipartUpload(spBucketName, spObjWithPrefix, metaData, done) + client.initiateNewMultipartUpload(spBucketName, spObjWithPrefix, metaData).finally(done) }, ) @@ -4392,13 +4418,22 @@ describe('functional tests', function () { `selectObjectContent(bucketName, objectName, selectOpts)_bucketName:${selObjContentBucket}, objectName:${selObject}`, (done) => { const selectOpts = { - expression: 'SELECT * FROM s3object s where s."Name" = \'Jane\'', + expression: `SELECT * FROM s3object s where s."Name" = 'Jane'`, expressionType: 'SQL', inputSerialization: { - CSV: { FileHeaderInfo: 'Use', RecordDelimiter: '\n', FieldDelimiter: ',' }, + CSV: { + FileHeaderInfo: 'Use', + RecordDelimiter: '\n', + FieldDelimiter: ',', + }, CompressionType: 'NONE', }, - outputSerialization: { CSV: { RecordDelimiter: '\n', FieldDelimiter: ',' } }, + outputSerialization: { + CSV: { + RecordDelimiter: '\n', + FieldDelimiter: ',', + }, + }, requestProgress: { Enabled: true }, } @@ -4598,7 +4633,9 @@ describe('functional tests', function () { (done) => { if (isVersioningSupported) { client - .listObjects(fdPrefixBucketName, '/my-prefix', true, { IncludeVersion: true }) + .listObjects(fdPrefixBucketName, '/my-prefix', true, { + IncludeVersion: true, + }) .on('error', done) .on('end', () => { if (_.isEqual(0, objVersionList.length)) { diff --git a/tests/unit/test.js b/tests/unit/test.js index ceaf4256..c199452d 100644 --- a/tests/unit/test.js +++ b/tests/unit/test.js @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import * as Stream from 'node:stream' +import Stream from 'node:stream' import { assert } from 'chai' import Nock from 'nock' @@ -29,8 +29,9 @@ import { makeDateLong, makeDateShort, partsRequired, -} from '../../src/helpers.js' -import * as Minio from '../../src/minio.js' +} from '../../src/helpers.ts' +import * as Minio from '../../src/minio.ts' +import { Client } from '../../src/minio.ts' const Package = { version: 'development' } @@ -243,7 +244,7 @@ describe('Client', function () { } }) }) - var client = new Minio.Client({ + var client = new Client({ endPoint: 'localhost', port: 9000, accessKey: 'accesskey', @@ -252,7 +253,7 @@ describe('Client', function () { }) describe('new client', () => { it('should work with https', () => { - var client = new Minio.Client({ + var client = new Client({ endPoint: 'localhost', accessKey: 'accesskey', secretKey: 'secretkey', @@ -260,7 +261,7 @@ describe('Client', function () { assert.equal(client.port, 443) }) it('should override port with http', () => { - var client = new Minio.Client({ + var client = new Client({ endPoint: 'localhost', port: 9000, accessKey: 'accesskey', @@ -270,7 +271,7 @@ describe('Client', function () { assert.equal(client.port, 9000) }) it('should work with http', () => { - var client = new Minio.Client({ + var client = new Client({ endPoint: 'localhost', accessKey: 'accesskey', secretKey: 'secretkey', @@ -279,7 +280,7 @@ describe('Client', function () { assert.equal(client.port, 80) }) it('should override port with https', () => { - var client = new Minio.Client({ + var client = new Client({ endPoint: 'localhost', port: 9000, accessKey: 'accesskey', @@ -289,7 +290,7 @@ describe('Client', function () { }) it('should fail with url', (done) => { try { - new Minio.Client({ + new Client({ endPoint: 'http://localhost:9000', accessKey: 'accesskey', secretKey: 'secretkey', @@ -300,7 +301,7 @@ describe('Client', function () { }) it('should fail with alphanumeric', (done) => { try { - new Minio.Client({ + new 
Client({ endPoint: 'localhost##$@3', accessKey: 'accesskey', secretKey: 'secretkey', @@ -311,7 +312,7 @@ describe('Client', function () { }) it('should fail with no url', (done) => { try { - new Minio.Client({ + new Client({ accessKey: 'accesskey', secretKey: 'secretkey', }) @@ -321,7 +322,7 @@ describe('Client', function () { }) it('should fail with bad port', (done) => { try { - new Minio.Client({ + new Client({ endPoint: 'localhost', port: -1, accessKey: 'accesskey', @@ -333,7 +334,7 @@ describe('Client', function () { }) it('should fail when secure param is passed', (done) => { try { - new Minio.Client({ + new Client({ endPoint: 'localhost', secure: false, port: 9000, @@ -346,7 +347,7 @@ describe('Client', function () { }) it('should fail when secure param is passed', (done) => { try { - new Minio.Client({ + new Client({ endPoint: 'localhost', secure: true, port: 9000, @@ -362,7 +363,7 @@ describe('Client', function () { describe('presigned-get', () => { it('should not generate presigned url with no access key', (done) => { try { - var client = new Minio.Client({ + var client = new Client({ endPoint: 'localhost', port: 9000, useSSL: false, @@ -383,7 +384,7 @@ describe('Client', function () { describe('presigned-put', () => { it('should not generate presigned url with no access key', (done) => { try { - var client = new Minio.Client({ + var client = new Client({ endPoint: 'localhost', port: 9000, useSSL: false, @@ -436,7 +437,7 @@ describe('Client', function () { }) describe('User Agent', () => { it('should have a default user agent', () => { - var client = new Minio.Client({ + var client = new Client({ endPoint: 'localhost', accessKey: 'accesskey', secretKey: 'secretkey', @@ -444,7 +445,7 @@ describe('Client', function () { assert.equal(`MinIO (${process.platform}; ${process.arch}) minio-js/${Package.version}`, client.userAgent) }) it('should set user agent', () => { - var client = new Minio.Client({ + var client = new Client({ endPoint: 'localhost', accessKey: 
'accesskey', secretKey: 'secretkey', @@ -456,7 +457,7 @@ describe('Client', function () { ) }) it('should set user agent without comments', () => { - var client = new Minio.Client({ + var client = new Client({ endPoint: 'localhost', accessKey: 'accesskey', secretKey: 'secretkey', @@ -469,7 +470,7 @@ describe('Client', function () { }) it('should not set user agent without name', (done) => { try { - var client = new Minio.Client({ + var client = new Client({ endPoint: 'localhost', accessKey: 'accesskey', secretKey: 'secretkey', @@ -481,7 +482,7 @@ describe('Client', function () { }) it('should not set user agent with empty name', (done) => { try { - var client = new Minio.Client({ + var client = new Client({ endPoint: 'localhost', accessKey: 'accesskey', secretKey: 'secretkey', @@ -493,7 +494,7 @@ describe('Client', function () { }) it('should not set user agent without version', (done) => { try { - var client = new Minio.Client({ + var client = new Client({ endPoint: 'localhost', accessKey: 'accesskey', secretKey: 'secretkey', @@ -505,7 +506,7 @@ describe('Client', function () { }) it('should not set user agent with empty version', (done) => { try { - var client = new Minio.Client({ + var client = new Client({ endPoint: 'localhost', accessKey: 'accesskey', secretKey: 'secretkey', @@ -960,40 +961,6 @@ describe('Client', function () { } }) }) - describe('Put Object Tags', () => { - it('should fail on null object', (done) => { - try { - client.putObjectTagging('my-bucket-name', null, {}, function () {}) - } catch (e) { - done() - } - }) - it('should fail on empty object', (done) => { - try { - client.putObjectTagging('my-bucket-name', null, {}, function () {}) - } catch (e) { - done() - } - }) - it('should fail on non object tags', (done) => { - try { - client.putObjectTagging('my-bucket-name', null, 'non-obj-tag', function () {}) - } catch (e) { - done() - } - }) - it('should fail if tags are more than 50 on an object', (done) => { - const _50_plus_key_tags = {} - 
for (let i = 0; i < 51; i += 1) { - _50_plus_key_tags[i] = i - } - try { - client.putObjectTagging('my-bucket-name', null, _50_plus_key_tags, function () {}) - } catch (e) { - done() - } - }) - }) describe('Get Object Tags', () => { it('should fail on invalid bucket', (done) => { try { diff --git a/types/minio.d.ts b/types/minio.d.ts index dbd85e74..e69de29b 100644 --- a/types/minio.d.ts +++ b/types/minio.d.ts @@ -1,775 +0,0 @@ -// imported from https://github.com/DefinitelyTyped/DefinitelyTyped/blob/93cfb0ec069731dcdfc31464788613f7cddb8192/types/minio/index.d.ts - -import { EventEmitter } from 'node:events' -import type { RequestOptions } from 'node:https' -import type { Readable as ReadableStream } from 'node:stream' - -// Exports only from typings -export type Region = - | 'us-east-1' - | 'us-west-1' - | 'us-west-2' - | 'eu-west-1' - | 'eu-central-1' - | 'ap-southeast-1' - | 'ap-northeast-1' - | 'ap-southeast-2' - | 'sa-east-1' - | 'cn-north-1' - | string -export type NotificationEvent = - | 's3:ObjectCreated:*' - | 's3:ObjectCreated:Put' - | 's3:ObjectCreated:Post' - | 's3:ObjectCreated:Copy' - | 's3:ObjectCreated:CompleteMultipartUpload' - | 's3:ObjectRemoved:*' - | 's3:ObjectRemoved:Delete' - | 's3:ObjectRemoved:DeleteMarkerCreated' - | 's3:ReducedRedundancyLostObject' - | 's3:TestEvent' - | 's3:ObjectRestore:Post' - | 's3:ObjectRestore:Completed' - | 's3:Replication:OperationFailedReplication' - | 's3:Replication:OperationMissedThreshold' - | 's3:Replication:OperationReplicatedAfterThreshold' - | 's3:Replication:OperationNotTracked' - | string -export type Mode = 'COMPLIANCE' | 'GOVERNANCE' -export type LockUnit = 'Days' | 'Years' -export type LegalHoldStatus = 'ON' | 'OFF' -export type NoResultCallback = (error: Error | null) => void -export type ResultCallback = (error: Error | null, result: T) => void -export type VersioningConfig = Record -export type TagList = Record -export type EmptyObject = Record -export type VersionIdentificator = Pick -export type 
Lifecycle = LifecycleConfig | null | '' -export type Lock = LockConfig | EmptyObject -export type Encryption = EncryptionConfig | EmptyObject -export type Retention = RetentionOptions | EmptyObject -export type IsoDate = string - -export interface ClientOptions { - endPoint: string - accessKey: string - secretKey: string - useSSL?: boolean | undefined - port?: number | undefined - region?: Region | undefined - transport?: any - sessionToken?: string | undefined - partSize?: number | undefined - pathStyle?: boolean | undefined -} - -export interface BucketItemFromList { - name: string - creationDate: Date -} - -export interface BucketItemCopy { - etag: string - lastModified: Date -} - -export interface BucketItem { - name: string - prefix: string - size: number - etag: string - lastModified: Date -} - -export interface BucketItemWithMetadata extends BucketItem { - metadata: ItemBucketMetadata | ItemBucketMetadataList -} - -export interface BucketItemStat { - size: number - etag: string - lastModified: Date - metaData: ItemBucketMetadata -} - -export interface IncompleteUploadedBucketItem { - key: string - uploadId: string - size: number -} - -export interface BucketStream extends ReadableStream { - on(event: 'data', listener: (item: T) => void): this - - on(event: 'end' | 'pause' | 'readable' | 'resume' | 'close', listener: () => void): this - - on(event: 'error', listener: (err: Error) => void): this - - on(event: string | symbol, listener: (...args: any[]) => void): this -} - -export interface PostPolicyResult { - postURL: string - formData: { - [key: string]: any - } -} - -export interface MetadataItem { - Key: string - Value: string -} - -export interface ItemBucketMetadataList { - Items: MetadataItem[] -} - -export interface ItemBucketMetadata { - [key: string]: any -} - -export interface UploadedObjectInfo { - etag: string - versionId: string | null -} - -export interface Tag { - Key: string - Value: string -} - -export interface LifecycleConfig { - Rule: 
LifecycleRule[] -} - -export interface LifecycleRule { - [key: string]: any -} - -export interface LockConfig { - mode: Mode - unit: LockUnit - validity: number -} - -export interface EncryptionConfig { - Rule: EncryptionRule[] -} - -export interface EncryptionRule { - [key: string]: any -} - -export interface ReplicationConfig { - role: string - rules: [] -} - -export interface ReplicationConfig { - [key: string]: any -} - -export interface RetentionOptions { - versionId: string - mode?: Mode - retainUntilDate?: IsoDate - governanceBypass?: boolean -} - -export interface LegalHoldOptions { - versionId: string - status: LegalHoldStatus -} - -export interface InputSerialization { - CompressionType?: 'NONE' | 'GZIP' | 'BZIP2' - CSV?: { - AllowQuotedRecordDelimiter?: boolean - Comments?: string - FieldDelimiter?: string - FileHeaderInfo?: 'NONE' | 'IGNORE' | 'USE' - QuoteCharacter?: string - QuoteEscapeCharacter?: string - RecordDelimiter?: string - } - JSON?: { - Type: 'DOCUMENT' | 'LINES' - } - Parquet?: EmptyObject -} - -export interface OutputSerialization { - CSV?: { - FieldDelimiter?: string - QuoteCharacter?: string - QuoteEscapeCharacter?: string - QuoteFields?: string - RecordDelimiter?: string - } - JSON?: { - RecordDelimiter?: string - } -} - -export interface SelectOptions { - expression: string - expressionType?: string - inputSerialization: InputSerialization - outputSerialization: OutputSerialization - requestProgress?: { Enabled: boolean } - scanRange?: { Start: number; End: number } -} - -export interface SourceObjectStats { - size: number - metaData: string - lastModicied: Date - versionId: string - etag: string -} - -// No need to export this. But without it - linter error. 
-export class TargetConfig { - setId(id: any): void - - addEvent(newEvent: any): void - - addFilterSuffix(suffix: any): void - - addFilterPrefix(prefix: any): void -} - -export interface MakeBucketOpt { - ObjectLocking: boolean -} - -export interface RemoveOptions { - versionId?: string - governanceBypass?: boolean -} - -// Exports from library -export class Client { - constructor(options: ClientOptions) - - // Bucket operations - makeBucket(bucketName: string, region: Region, makeOpts: MakeBucketOpt, callback: NoResultCallback): void - makeBucket(bucketName: string, region: Region, callback: NoResultCallback): void - makeBucket(bucketName: string, callback: NoResultCallback): void - makeBucket(bucketName: string, region?: Region, makeOpts?: MakeBucketOpt): Promise - - listBuckets(callback: ResultCallback): void - listBuckets(): Promise - - bucketExists(bucketName: string, callback: ResultCallback): void - bucketExists(bucketName: string): Promise - - removeBucket(bucketName: string, callback: NoResultCallback): void - removeBucket(bucketName: string): Promise - - listObjects(bucketName: string, prefix?: string, recursive?: boolean): BucketStream - - listObjectsV2(bucketName: string, prefix?: string, recursive?: boolean, startAfter?: string): BucketStream - - listIncompleteUploads( - bucketName: string, - prefix?: string, - recursive?: boolean, - ): BucketStream - - getBucketVersioning(bucketName: string, callback: ResultCallback): void - getBucketVersioning(bucketName: string): Promise - - setBucketVersioning(bucketName: string, versioningConfig: any, callback: NoResultCallback): void - setBucketVersioning(bucketName: string, versioningConfig: any): Promise - - getBucketTagging(bucketName: string, callback: ResultCallback): void - getBucketTagging(bucketName: string): Promise - - setBucketTagging(bucketName: string, tags: TagList, callback: NoResultCallback): void - setBucketTagging(bucketName: string, tags: TagList): Promise - - removeBucketTagging(bucketName: 
string, callback: NoResultCallback): void - removeBucketTagging(bucketName: string): Promise - - setBucketLifecycle(bucketName: string, lifecycleConfig: Lifecycle, callback: NoResultCallback): void - setBucketLifecycle(bucketName: string, lifecycleConfig: Lifecycle): Promise - - getBucketLifecycle(bucketName: string, callback: ResultCallback): void - getBucketLifecycle(bucketName: string): Promise - - removeBucketLifecycle(bucketName: string, callback: NoResultCallback): void - removeBucketLifecycle(bucketName: string): Promise - - setObjectLockConfig(bucketName: string, callback: NoResultCallback): void - setObjectLockConfig(bucketName: string, lockConfig: Lock, callback: NoResultCallback): void - setObjectLockConfig(bucketName: string, lockConfig?: Lock): Promise - - getObjectLockConfig(bucketName: string, callback: ResultCallback): void - getObjectLockConfig(bucketName: string): Promise - - getBucketEncryption(bucketName: string, callback: ResultCallback): void - getBucketEncryption(bucketName: string): Promise - - setBucketEncryption(bucketName: string, encryptionConfig: Encryption, callback: NoResultCallback): void - setBucketEncryption(bucketName: string, encryptionConfig: Encryption): Promise - - removeBucketEncryption(bucketName: string, callback: NoResultCallback): void - removeBucketEncryption(bucketName: string): Promise - - setBucketReplication(bucketName: string, replicationConfig: ReplicationConfig, callback: NoResultCallback): void - setBucketReplication(bucketName: string, replicationConfig: ReplicationConfig): Promise - - getBucketReplication(bucketName: string, callback: ResultCallback): void - getBucketReplication(bucketName: string): Promise - - removeBucketReplication(bucketName: string, callback: NoResultCallback): void - removeBucketReplication(bucketName: string): Promise - - // Object operations - getObject(bucketName: string, objectName: string, callback: ResultCallback): void - getObject(bucketName: string, objectName: string): Promise - 
- getPartialObject( - bucketName: string, - objectName: string, - offset: number, - callback: ResultCallback, - ): void - getPartialObject( - bucketName: string, - objectName: string, - offset: number, - length: number, - callback: ResultCallback, - ): void - getPartialObject(bucketName: string, objectName: string, offset: number, length?: number): Promise - - fGetObject(bucketName: string, objectName: string, filePath: string, callback: NoResultCallback): void - fGetObject(bucketName: string, objectName: string, filePath: string): Promise - - putObject( - bucketName: string, - objectName: string, - stream: ReadableStream | Buffer | string, - callback: ResultCallback, - ): void - putObject( - bucketName: string, - objectName: string, - stream: ReadableStream | Buffer | string, - size: number, - callback: ResultCallback, - ): void - putObject( - bucketName: string, - objectName: string, - stream: ReadableStream | Buffer | string, - size: number, - metaData: ItemBucketMetadata, - callback: ResultCallback, - ): void - putObject( - bucketName: string, - objectName: string, - stream: ReadableStream | Buffer | string, - size?: number, - metaData?: ItemBucketMetadata, - ): Promise - putObject( - bucketName: string, - objectName: string, - stream: ReadableStream | Buffer | string, - metaData?: ItemBucketMetadata, - ): Promise - - fPutObject( - bucketName: string, - objectName: string, - filePath: string, - metaData: ItemBucketMetadata, - callback: ResultCallback, - ): void - fPutObject( - bucketName: string, - objectName: string, - filePath: string, - metaData?: ItemBucketMetadata, - ): Promise - - copyObject( - bucketName: string, - objectName: string, - sourceObject: string, - conditions: CopyConditions, - callback: ResultCallback, - ): void - copyObject( - bucketName: string, - objectName: string, - sourceObject: string, - conditions: CopyConditions, - ): Promise - - statObject(bucketName: string, objectName: string, callback: ResultCallback): void - 
statObject(bucketName: string, objectName: string): Promise - - removeObject(bucketName: string, objectName: string, removeOpts: RemoveOptions, callback: NoResultCallback): void - removeObject(bucketName: string, objectName: string, callback: NoResultCallback): void - removeObject(bucketName: string, objectName: string, removeOpts?: RemoveOptions): Promise - - removeObjects(bucketName: string, objectsList: string[], callback: NoResultCallback): void - removeObjects(bucketName: string, objectsList: string[]): Promise - - removeIncompleteUpload(bucketName: string, objectName: string, callback: NoResultCallback): void - removeIncompleteUpload(bucketName: string, objectName: string): Promise - - putObjectRetention(bucketName: string, objectName: string, callback: NoResultCallback): void - putObjectRetention( - bucketName: string, - objectName: string, - retentionOptions: Retention, - callback: NoResultCallback, - ): void - putObjectRetention(bucketName: string, objectName: string, retentionOptions?: Retention): Promise - - getObjectRetention( - bucketName: string, - objectName: string, - options: VersionIdentificator, - callback: ResultCallback, - ): void - getObjectRetention(bucketName: string, objectName: string, options: VersionIdentificator): Promise - - // It seems, putObjectTagging is deprecated in favor or setObjectTagging - there is no such a method in the library source code - /** - * @deprecated Use setObjectTagging instead. - */ - putObjectTagging(bucketName: string, objectName: string, tags: TagList, callback: NoResultCallback): void - /** - * @deprecated Use setObjectTagging instead. - */ - putObjectTagging( - bucketName: string, - objectName: string, - tags: TagList, - putOptions: VersionIdentificator, - callback: NoResultCallback, - ): void - /** - * @deprecated Use setObjectTagging instead. 
- */ - putObjectTagging( - bucketName: string, - objectName: string, - tags: TagList, - putOptions?: VersionIdentificator, - ): Promise - - setObjectTagging(bucketName: string, objectName: string, tags: TagList, callback: NoResultCallback): void - setObjectTagging( - bucketName: string, - objectName: string, - tags: TagList, - putOptions: VersionIdentificator, - callback: NoResultCallback, - ): void - setObjectTagging( - bucketName: string, - objectName: string, - tags: TagList, - putOptions?: VersionIdentificator, - ): Promise - - removeObjectTagging(bucketName: string, objectName: string, callback: NoResultCallback): void - removeObjectTagging( - bucketName: string, - objectName: string, - removeOptions: VersionIdentificator, - callback: NoResultCallback, - ): void - removeObjectTagging(bucketName: string, objectName: string, removeOptions?: VersionIdentificator): Promise - - getObjectTagging(bucketName: string, objectName: string, callback: ResultCallback): void - getObjectTagging( - bucketName: string, - objectName: string, - getOptions: VersionIdentificator, - callback: ResultCallback, - ): void - getObjectTagging(bucketName: string, objectName: string, getOptions?: VersionIdentificator): Promise - - getObjectLegalHold(bucketName: string, objectName: string, callback: ResultCallback): void - getObjectLegalHold( - bucketName: string, - objectName: string, - getOptions: VersionIdentificator, - callback: ResultCallback, - ): void - getObjectLegalHold( - bucketName: string, - objectName: string, - getOptions?: VersionIdentificator, - ): Promise - - setObjectLegalHold(bucketName: string, objectName: string, callback: NoResultCallback): void - setObjectLegalHold( - bucketName: string, - objectName: string, - setOptions: LegalHoldOptions, - callback: NoResultCallback, - ): void - setObjectLegalHold(bucketName: string, objectName: string, setOptions?: LegalHoldOptions): Promise - - composeObject( - destObjConfig: CopyDestinationOptions, - sourceObjList: 
CopySourceOptions[], - callback: ResultCallback, - ): void - composeObject(destObjConfig: CopyDestinationOptions, sourceObjList: CopySourceOptions[]): Promise - - selectObjectContent( - bucketName: string, - objectName: string, - selectOpts: SelectOptions, - callback: NoResultCallback, - ): void - selectObjectContent(bucketName: string, objectName: string, selectOpts: SelectOptions): Promise - - // Presigned operations - presignedUrl(httpMethod: string, bucketName: string, objectName: string, callback: ResultCallback): void - presignedUrl( - httpMethod: string, - bucketName: string, - objectName: string, - expiry: number, - callback: ResultCallback, - ): void - presignedUrl( - httpMethod: string, - bucketName: string, - objectName: string, - expiry: number, - reqParams: { [key: string]: any }, - callback: ResultCallback, - ): void - presignedUrl( - httpMethod: string, - bucketName: string, - objectName: string, - expiry: number, - reqParams: { [key: string]: any }, - requestDate: Date, - callback: ResultCallback, - ): void - presignedUrl( - httpMethod: string, - bucketName: string, - objectName: string, - expiry?: number, - reqParams?: { [key: string]: any }, - requestDate?: Date, - ): Promise - - presignedGetObject(bucketName: string, objectName: string, callback: ResultCallback): void - presignedGetObject(bucketName: string, objectName: string, expiry: number, callback: ResultCallback): void - presignedGetObject( - bucketName: string, - objectName: string, - expiry: number, - respHeaders: { [key: string]: any }, - callback: ResultCallback, - ): void - presignedGetObject( - bucketName: string, - objectName: string, - expiry: number, - respHeaders: { [key: string]: any }, - requestDate: Date, - callback: ResultCallback, - ): void - presignedGetObject( - bucketName: string, - objectName: string, - expiry?: number, - respHeaders?: { [key: string]: any }, - requestDate?: Date, - ): Promise - - presignedPutObject(bucketName: string, objectName: string, callback: 
ResultCallback): void - presignedPutObject(bucketName: string, objectName: string, expiry: number, callback: ResultCallback): void - presignedPutObject(bucketName: string, objectName: string, expiry?: number): Promise - - presignedPostPolicy(policy: PostPolicy, callback: ResultCallback): void - presignedPostPolicy(policy: PostPolicy): Promise - - // Bucket Policy & Notification operations - getBucketNotification(bucketName: string, callback: ResultCallback): void - getBucketNotification(bucketName: string): Promise - - setBucketNotification( - bucketName: string, - bucketNotificationConfig: NotificationConfig, - callback: NoResultCallback, - ): void - setBucketNotification(bucketName: string, bucketNotificationConfig: NotificationConfig): Promise - - removeAllBucketNotification(bucketName: string, callback: NoResultCallback): void - removeAllBucketNotification(bucketName: string): Promise - - getBucketPolicy(bucketName: string, callback: ResultCallback): void - getBucketPolicy(bucketName: string): Promise - - setBucketPolicy(bucketName: string, bucketPolicy: string, callback: NoResultCallback): void - setBucketPolicy(bucketName: string, bucketPolicy: string): Promise - - listenBucketNotification( - bucketName: string, - prefix: string, - suffix: string, - events: NotificationEvent[], - ): NotificationPoller - - // Custom Settings - setS3TransferAccelerate(endpoint: string): void - - // Other - newPostPolicy(): PostPolicy - - setRequestOptions(options: RequestOptions): void - - // Minio extensions that aren't necessary present for Amazon S3 compatible storage servers - extensions: { - listObjectsV2WithMetadata( - bucketName: string, - prefix?: string, - recursive?: boolean, - startAfter?: string, - ): BucketStream - } -} - -export namespace Policy { - const NONE: 'none' - const READONLY: 'readonly' - const WRITEONLY: 'writeonly' - const READWRITE: 'readwrite' -} - -export class CopyConditions { - setModified(date: Date): void - - setUnmodified(date: Date): void - - 
setMatchETag(etag: string): void - - setMatchETagExcept(etag: string): void -} - -export class PostPolicy { - setExpires(date: Date): void - - setKey(objectName: string): void - - setKeyStartsWith(prefix: string): void - - setBucket(bucketName: string): void - - setContentType(type: string): void - - setContentTypeStartsWith(prefix: string): void - - setContentLengthRange(min: number, max: number): void - - setContentDisposition(disposition: string): void - - setUserMetaData(metadata: Record): void -} - -export class NotificationPoller extends EventEmitter { - stop(): void - - start(): void - - // must to be public? - checkForChanges(): void -} - -export class NotificationConfig { - add(target: TopicConfig | QueueConfig | CloudFunctionConfig): void -} - -export class TopicConfig extends TargetConfig { - constructor(arn: string) -} - -export class QueueConfig extends TargetConfig { - constructor(arn: string) -} - -export class CloudFunctionConfig extends TargetConfig { - constructor(arn: string) -} - -export class CopySourceOptions { - constructor(options: { - Bucket: string - Object: string - VersionID?: string - MatchETag?: string - NoMatchETag?: string - MatchModifiedSince?: string - MatchUnmodifiedSince?: string - MatchRange?: boolean - Start?: number - End?: number - Encryption?: { - type: string - SSEAlgorithm?: string - KMSMasterKeyID?: string - } - }) - - getHeaders(): Record - - validate(): boolean -} - -export class CopyDestinationOptions { - constructor(options: { - Bucket: string - Object: string - Encryption?: { - type: string - SSEAlgorithm?: string - KMSMasterKeyID?: string - } - UserMetadata?: Record - UserTags?: Record | string - LegalHold?: LegalHoldStatus - RetainUntilDate?: string - Mode?: Mode - }) - - getHeaders(): Record - - validate(): boolean -} - -export function buildARN( - partition: string, - service: string, - region: string, - accountId: string, - resource: string, -): string - -export const ObjectCreatedAll: NotificationEvent // 
s3:ObjectCreated:*' -export const ObjectCreatedPut: NotificationEvent // s3:ObjectCreated:Put -export const ObjectCreatedPost: NotificationEvent // s3:ObjectCreated:Post -export const ObjectCreatedCopy: NotificationEvent // s3:ObjectCreated:Copy -export const ObjectCreatedCompleteMultipartUpload: NotificationEvent // s3:ObjectCreated:CompleteMultipartUpload -export const ObjectRemovedAll: NotificationEvent // s3:ObjectRemoved:* -export const ObjectRemovedDelete: NotificationEvent // s3:ObjectRemoved:Delete -export const ObjectRemovedDeleteMarkerCreated: NotificationEvent // s3:ObjectRemoved:DeleteMarkerCreated -export const ObjectReducedRedundancyLostObject: NotificationEvent // s3:ReducedRedundancyLostObject From ca09d53027a65c4c133ad88e027bebd2df2875eb Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 21:40:58 +0800 Subject: [PATCH 15/78] fix --- package-lock.json | 127 ++++++++++++++++++--------- package.json | 13 +-- tests/functional/functional-tests.js | 7 +- tests/unit/test.js | 41 +++++---- types/minio.d.ts | 0 5 files changed, 118 insertions(+), 70 deletions(-) delete mode 100644 types/minio.d.ts diff --git a/package-lock.json b/package-lock.json index 9062b008..40cf1ea4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11,16 +11,16 @@ "dependencies": { "async": "^3.1.0", "block-stream2": "^2.0.0", - "browser-or-node": "^1.3.0", - "buffer-crc32": "^0.2.13", - "fast-xml-parser": "^4.1.3", + "browser-or-node": "^2.1.1", + "crc-32": "^1.2.2", + "fast-xml-parser": "^4.2.2", "ipaddr.js": "^2.0.1", "json-stream": "^1.0.0", "lodash": "^4.17.21", "mime-types": "^2.1.14", - "mkdirp": "^0.5.1", + "mkdirp": "^3.0.1", "query-string": "^7.1.1", - "through2": "^3.0.1", + "through2": "^4.0.2", "web-encoding": "^1.1.5", "xml": "^1.0.0", "xml2js": "^0.5.0" @@ -32,10 +32,13 @@ "@babel/register": "^7.21.0", "@nodelib/fs.walk": "^1.2.8", "@types/async": "^3.2.18", + "@types/block-stream2": "^2.1.0", "@types/browser-or-node": "^1.3.0", "@types/lodash": 
"^4.14.192", "@types/mime-types": "^2.1.1", "@types/node": "^18.15.11", + "@types/stream-json": "^1.7.3", + "@types/through2": "^2.0.38", "@types/xml": "^1.0.8", "@types/xml2js": "^0.4.11", "@typescript-eslint/eslint-plugin": "^5.57.1", @@ -2177,6 +2180,15 @@ "integrity": "sha512-/IsuXp3B9R//uRLi40VlIYoMp7OzhkunPe2fDu7jGfQXI9y3CDCx6FC4juRLSqrpmLst3vgsiK536AAGJFl4Ww==", "dev": true }, + "node_modules/@types/block-stream2": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/block-stream2/-/block-stream2-2.1.0.tgz", + "integrity": "sha512-ue1bw4ZKeWIudQfKFvKAudFwpZ1Co1DzUCFxeJWnYGnpiGGZ9SU4gNb9NCSVctZ64W/L4SSVYM77nuhZZ8V0Ew==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/browser-or-node": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/@types/browser-or-node/-/browser-or-node-1.3.0.tgz", @@ -2229,6 +2241,34 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/stream-chain": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@types/stream-chain/-/stream-chain-2.0.1.tgz", + "integrity": "sha512-D+Id9XpcBpampptkegH7WMsEk6fUdf9LlCIX7UhLydILsqDin4L0QT7ryJR0oycwC7OqohIzdfcMHVZ34ezNGg==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/stream-json": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/@types/stream-json/-/stream-json-1.7.3.tgz", + "integrity": "sha512-Jqsyq5VPOTWorvEmzWhEWH5tJnHA+bB8vt/Zzb11vSDj8esfSHDMj2rbVjP0mfJQzl3YBJSXBBq08iiyaBK3KA==", + "dev": true, + "dependencies": { + "@types/node": "*", + "@types/stream-chain": "*" + } + }, + "node_modules/@types/through2": { + "version": "2.0.38", + "resolved": "https://registry.npmjs.org/@types/through2/-/through2-2.0.38.tgz", + "integrity": "sha512-YFu+nHmjxMurkH1BSzA0Z1WrKDAY8jUKPZctNQn7mc+/KKtp2XxnclHFXxdB1m7Iqnzb5aywgP8TMK283LezGQ==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/xml": { "version": "1.0.8", "resolved": 
"https://registry.npmjs.org/@types/xml/-/xml-1.0.8.tgz", @@ -2950,10 +2990,9 @@ } }, "node_modules/browser-or-node": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/browser-or-node/-/browser-or-node-1.3.0.tgz", - "integrity": "sha512-0F2z/VSnLbmEeBcUrSuDH5l0HxTXdQQzLjkmBR4cYfvg1zJrKSlmIZFqyFR8oX0NrwPhy3c3HQ6i3OxMbew4Tg==", - "license": "MIT" + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/browser-or-node/-/browser-or-node-2.1.1.tgz", + "integrity": "sha512-8CVjaLJGuSKMVTxJ2DpBl5XnlNDiT4cQFeuCJJrvJmts9YrTZDizTX7PjC2s6W4x+MBGZeEY6dGMrF04/6Hgqg==" }, "node_modules/browser-stdout": { "version": "1.3.1", @@ -2991,15 +3030,6 @@ "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" } }, - "node_modules/buffer-crc32": { - "version": "0.2.13", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", - "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", - "license": "MIT", - "engines": { - "node": "*" - } - }, "node_modules/buffer-from": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", @@ -3376,6 +3406,17 @@ "url": "https://opencollective.com/core-js" } }, + "node_modules/crc-32": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", + "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==", + "bin": { + "crc32": "bin/crc32.njs" + }, + "engines": { + "node": ">=0.8" + } + }, "node_modules/cross-spawn": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", @@ -4190,19 +4231,24 @@ "license": "MIT" }, "node_modules/fast-xml-parser": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.0.tgz", - "integrity": "sha512-+zVQv4aVTO+o8oRUyRL7PjgeVo1J6oP8Cw2+a8UTZQcj5V0yUK5T63gTN0ldgiHDPghUjKc4OpT6SwMTwnOQug==", - 
"license": "MIT", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.2.tgz", + "integrity": "sha512-DLzIPtQqmvmdq3VUKR7T6omPK/VCRNqgFlGtbESfyhcH2R4I8EzK1/K6E8PkRCK2EabWrUHK32NjYRbEFnnz0Q==", + "funding": [ + { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + }, + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], "dependencies": { "strnum": "^1.0.5" }, "bin": { "fxparser": "src/cli/cli.js" - }, - "funding": { - "type": "paypal", - "url": "https://paypal.me/naturalintelligence" } }, "node_modules/fastq": { @@ -5761,21 +5807,24 @@ "version": "1.2.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/mkdirp": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", - "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", - "license": "MIT", - "dependencies": { - "minimist": "^1.2.6" - }, + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", + "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", "bin": { - "mkdirp": "bin/cmd.js" + "mkdirp": "dist/cjs/src/bin.js" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, "node_modules/mocha": { @@ -7449,13 +7498,11 @@ "license": "MIT" }, "node_modules/through2": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.2.tgz", - "integrity": "sha512-enaDQ4MUyP2W6ZyT6EsMzqBPZaM/avg8iuo+l2d3QCs0J+6RaqkHV/2/lOwDTueBHeJ/2LG9lrLW3d5rWPucuQ==", - "license": "MIT", + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/through2/-/through2-4.0.2.tgz", + "integrity": "sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==", "dependencies": { - "inherits": "^2.0.4", - "readable-stream": "2 || 3" + "readable-stream": "3" } }, "node_modules/tiny-glob": { diff --git a/package.json b/package.json index 55df7441..4a2602b5 100644 --- a/package.json +++ b/package.json @@ -85,16 +85,16 @@ "dependencies": { "async": "^3.1.0", "block-stream2": "^2.0.0", - "browser-or-node": "^1.3.0", - "buffer-crc32": "^0.2.13", - "fast-xml-parser": "^4.1.3", + "browser-or-node": "^2.1.1", + "crc-32": "^1.2.2", + "fast-xml-parser": "^4.2.2", "ipaddr.js": "^2.0.1", "json-stream": "^1.0.0", "lodash": "^4.17.21", "mime-types": "^2.1.14", - "mkdirp": "^0.5.1", + "mkdirp": "^3.0.1", "query-string": "^7.1.1", - "through2": "^3.0.1", + "through2": "^4.0.2", "web-encoding": "^1.1.5", "xml": "^1.0.0", "xml2js": "^0.5.0" @@ -106,10 +106,13 @@ "@babel/register": "^7.21.0", "@nodelib/fs.walk": "^1.2.8", "@types/async": "^3.2.18", + "@types/block-stream2": "^2.1.0", "@types/browser-or-node": "^1.3.0", "@types/lodash": "^4.14.192", "@types/mime-types": "^2.1.1", "@types/node": "^18.15.11", + "@types/stream-json": "^1.7.3", + "@types/through2": "^2.0.38", "@types/xml": "^1.0.8", "@types/xml2js": "^0.4.11", "@typescript-eslint/eslint-plugin": "^5.57.1", diff --git a/tests/functional/functional-tests.js b/tests/functional/functional-tests.js index e468cb11..a27b5ebd 100644 --- a/tests/functional/functional-tests.js +++ b/tests/functional/functional-tests.js @@ -34,7 +34,6 @@ import { AssumeRoleProvider } from '../../src/AssumeRoleProvider.ts' import { CopyDestinationOptions, CopySourceOptions, DEFAULT_REGION, getVersionId, isArray } from '../../src/helpers.ts' import { removeDirAndFiles } from '../../src/helpers.ts' import * as minio from '../../src/minio.ts' -import { Client } from '../../src/minio.ts' const isWindowsPlatform = process.platform === 'win32' 
@@ -88,10 +87,10 @@ describe('functional tests', function () { // a directory with files to read from, i.e. /mint/data. var dataDir = process.env['MINT_DATA_DIR'] - var client = new Client(clientConfigParams) + var client = new minio.Client(clientConfigParams) var usEastConfig = clientConfigParams usEastConfig.region = server_region - var clientUsEastRegion = new Client(usEastConfig) + var clientUsEastRegion = new minio.Client(usEastConfig) var traceStream // FUNCTIONAL_TEST_TRACE env variable contains the path to which trace @@ -3811,7 +3810,7 @@ describe('functional tests', function () { credentialsProvider: assumeRoleProvider, }) - const assumeRoleClient = new Client(aRoleConf) + const assumeRoleClient = new minio.Client(aRoleConf) assumeRoleClient.region = server_region describe('Put an Object', function () { diff --git a/tests/unit/test.js b/tests/unit/test.js index c199452d..e540fd12 100644 --- a/tests/unit/test.js +++ b/tests/unit/test.js @@ -31,7 +31,6 @@ import { partsRequired, } from '../../src/helpers.ts' import * as Minio from '../../src/minio.ts' -import { Client } from '../../src/minio.ts' const Package = { version: 'development' } @@ -244,7 +243,7 @@ describe('Client', function () { } }) }) - var client = new Client({ + var client = new Minio.Client({ endPoint: 'localhost', port: 9000, accessKey: 'accesskey', @@ -253,7 +252,7 @@ describe('Client', function () { }) describe('new client', () => { it('should work with https', () => { - var client = new Client({ + var client = new Minio.Client({ endPoint: 'localhost', accessKey: 'accesskey', secretKey: 'secretkey', @@ -261,7 +260,7 @@ describe('Client', function () { assert.equal(client.port, 443) }) it('should override port with http', () => { - var client = new Client({ + var client = new Minio.Client({ endPoint: 'localhost', port: 9000, accessKey: 'accesskey', @@ -271,7 +270,7 @@ describe('Client', function () { assert.equal(client.port, 9000) }) it('should work with http', () => { - var client = new 
Client({ + var client = new Minio.Client({ endPoint: 'localhost', accessKey: 'accesskey', secretKey: 'secretkey', @@ -280,7 +279,7 @@ describe('Client', function () { assert.equal(client.port, 80) }) it('should override port with https', () => { - var client = new Client({ + var client = new Minio.Client({ endPoint: 'localhost', port: 9000, accessKey: 'accesskey', @@ -290,7 +289,7 @@ describe('Client', function () { }) it('should fail with url', (done) => { try { - new Client({ + new Minio.Client({ endPoint: 'http://localhost:9000', accessKey: 'accesskey', secretKey: 'secretkey', @@ -301,7 +300,7 @@ describe('Client', function () { }) it('should fail with alphanumeric', (done) => { try { - new Client({ + new Minio.Client({ endPoint: 'localhost##$@3', accessKey: 'accesskey', secretKey: 'secretkey', @@ -312,7 +311,7 @@ describe('Client', function () { }) it('should fail with no url', (done) => { try { - new Client({ + new Minio.Client({ accessKey: 'accesskey', secretKey: 'secretkey', }) @@ -322,7 +321,7 @@ describe('Client', function () { }) it('should fail with bad port', (done) => { try { - new Client({ + new Minio.Client({ endPoint: 'localhost', port: -1, accessKey: 'accesskey', @@ -334,7 +333,7 @@ describe('Client', function () { }) it('should fail when secure param is passed', (done) => { try { - new Client({ + new Minio.Client({ endPoint: 'localhost', secure: false, port: 9000, @@ -347,7 +346,7 @@ describe('Client', function () { }) it('should fail when secure param is passed', (done) => { try { - new Client({ + new Minio.Client({ endPoint: 'localhost', secure: true, port: 9000, @@ -363,7 +362,7 @@ describe('Client', function () { describe('presigned-get', () => { it('should not generate presigned url with no access key', (done) => { try { - var client = new Client({ + var client = new Minio.Client({ endPoint: 'localhost', port: 9000, useSSL: false, @@ -384,7 +383,7 @@ describe('Client', function () { describe('presigned-put', () => { it('should not generate 
presigned url with no access key', (done) => { try { - var client = new Client({ + var client = new Minio.Client({ endPoint: 'localhost', port: 9000, useSSL: false, @@ -437,7 +436,7 @@ describe('Client', function () { }) describe('User Agent', () => { it('should have a default user agent', () => { - var client = new Client({ + var client = new Minio.Client({ endPoint: 'localhost', accessKey: 'accesskey', secretKey: 'secretkey', @@ -445,7 +444,7 @@ describe('Client', function () { assert.equal(`MinIO (${process.platform}; ${process.arch}) minio-js/${Package.version}`, client.userAgent) }) it('should set user agent', () => { - var client = new Client({ + var client = new Minio.Client({ endPoint: 'localhost', accessKey: 'accesskey', secretKey: 'secretkey', @@ -457,7 +456,7 @@ describe('Client', function () { ) }) it('should set user agent without comments', () => { - var client = new Client({ + var client = new Minio.Client({ endPoint: 'localhost', accessKey: 'accesskey', secretKey: 'secretkey', @@ -470,7 +469,7 @@ describe('Client', function () { }) it('should not set user agent without name', (done) => { try { - var client = new Client({ + var client = new Minio.Client({ endPoint: 'localhost', accessKey: 'accesskey', secretKey: 'secretkey', @@ -482,7 +481,7 @@ describe('Client', function () { }) it('should not set user agent with empty name', (done) => { try { - var client = new Client({ + var client = new Minio.Client({ endPoint: 'localhost', accessKey: 'accesskey', secretKey: 'secretkey', @@ -494,7 +493,7 @@ describe('Client', function () { }) it('should not set user agent without version', (done) => { try { - var client = new Client({ + var client = new Minio.Client({ endPoint: 'localhost', accessKey: 'accesskey', secretKey: 'secretkey', @@ -506,7 +505,7 @@ describe('Client', function () { }) it('should not set user agent with empty version', (done) => { try { - var client = new Client({ + var client = new Minio.Client({ endPoint: 'localhost', accessKey: 
'accesskey', secretKey: 'secretkey', diff --git a/types/minio.d.ts b/types/minio.d.ts deleted file mode 100644 index e69de29b..00000000 From 346e9ec524e3a01ae3ce5bce2761d44c6f0ed527 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 21:43:41 +0800 Subject: [PATCH 16/78] type --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 4a2602b5..a6ba156c 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "description": "S3 Compatible Cloud Storage client", "main": "./dist/main/minio.js", "module": "./dist/esm/minio.mjs", - "types": "./types/minio.d.ts", + "types": "./dist/main/minio.d.ts", "scripts": { "prepare": "husky install", "tsc": "tsc", @@ -22,7 +22,7 @@ }, "exports": { ".": { - "types": "./types/minio.d.ts", + "types": "./dist/main/minio.d.ts", "require": "./dist/main/minio.js", "default": "./dist/esm/minio.mjs" }, From 955efa84c96e7ca04bda0cdc56fa285a06f556a1 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 21:53:22 +0800 Subject: [PATCH 17/78] refactor --- src/extensions.ts | 4 ++-- src/minio.ts | 5 +---- src/notification.ts | 6 +++--- src/typed-client.ts | 8 ++++---- src/typed-client2.ts | 6 +++--- src/{client.ts => typedBase.ts} | 4 ++-- src/upload.ts | 0 7 files changed, 15 insertions(+), 18 deletions(-) rename src/{client.ts => typedBase.ts} (99%) delete mode 100644 src/upload.ts diff --git a/src/extensions.ts b/src/extensions.ts index 8e018ce1..eccfd99e 100644 --- a/src/extensions.ts +++ b/src/extensions.ts @@ -19,13 +19,13 @@ import * as stream from 'node:stream' import * as errors from './errors.ts' import { isBoolean, isNumber, isString, isValidBucketName, isValidPrefix, pipesetup, uriEscape } from './helpers.ts' import * as transformers from './transformers.ts' -import type { TypedClient2 } from './typed-client2.ts' +import type { Client } from './typed-client2.ts' // TODO type S3Object = unknown export class extensions { - constructor(readonly client: 
TypedClient2) {} + constructor(readonly client: Client) {} // List the objects in the bucket using S3 ListObjects V2 With Metadata // diff --git a/src/minio.ts b/src/minio.ts index 9ccad980..2f6d590b 100644 --- a/src/minio.ts +++ b/src/minio.ts @@ -14,8 +14,6 @@ * limitations under the License. */ -import { TypedClient2 } from './typed-client2.ts' - export { AssumeRoleProvider } from './AssumeRoleProvider.ts' export { CopyConditions } from './copyConditions.ts' export { CredentialProvider } from './CredentialProvider.ts' @@ -37,5 +35,4 @@ export { TopicConfig, } from './notification.ts' export { PostPolicy } from './postPolicy.ts' - -export class Client extends TypedClient2 {} +export { Client } from './typed-client2.ts' diff --git a/src/notification.ts b/src/notification.ts index 1b26db23..ed0cda96 100644 --- a/src/notification.ts +++ b/src/notification.ts @@ -18,8 +18,8 @@ import { EventEmitter } from 'node:events' import jsonLineParser from 'stream-json/jsonl/Parser.js' -import type { Client } from './client.ts' import { DEFAULT_REGION, pipesetup, uriEscape } from './helpers.ts' +import type { TypedBase } from './typedBase.ts' // TODO: type this @@ -151,14 +151,14 @@ export type NotificationRecord = unknown // Listening constitutes repeatedly requesting s3 whether or not any // changes have occurred. 
export class NotificationPoller extends EventEmitter { - private client: Client + private client: TypedBase private bucketName: string private prefix: string private suffix: string private events: NotificationEvent[] private ending: boolean - constructor(client: Client, bucketName: string, prefix: string, suffix: string, events: NotificationEvent[]) { + constructor(client: TypedBase, bucketName: string, prefix: string, suffix: string, events: NotificationEvent[]) { super() this.client = client diff --git a/src/typed-client.ts b/src/typed-client.ts index e65c6096..3992b7b9 100644 --- a/src/typed-client.ts +++ b/src/typed-client.ts @@ -5,8 +5,6 @@ import xml2js from 'xml2js' import { asCallback, asCallbackFn } from './as-callback.ts' import { fsp } from './async.ts' -import type { RequestMethod, RequestOption } from './client.ts' -import { Client, findCallback } from './client.ts' import * as errors from './errors.ts' import type { MetaData, SelectResults } from './helpers.ts' import { @@ -56,11 +54,13 @@ import type { VersionIdentification, VersioningConfig, } from './type.ts' +import type { RequestMethod, RequestOption } from './typedBase.ts' +import { findCallback, TypedBase } from './typedBase.ts' import type { S3ListObject } from './xml-parsers.ts' import * as xmlParsers from './xml-parsers.ts' import { parseSelectObjectContentResponse } from './xml-parsers.ts' -export class TypedClient extends Client { +export class TypedClient extends TypedBase { getBucketVersioning(bucketName: string, callback: ResultCallback): void getBucketVersioning(bucketName: string): Promise @@ -1578,7 +1578,7 @@ export class TypedClient extends Client { } export class Helper { - constructor(private readonly client: Client) {} + constructor(private readonly client: TypedBase) {} async MultipleFileUpload( bucketName: string, diff --git a/src/typed-client2.ts b/src/typed-client2.ts index dceb2734..56cbd58b 100644 --- a/src/typed-client2.ts +++ b/src/typed-client2.ts @@ -6,8 +6,6 @@ import 
xml2js from 'xml2js' import { asCallback, asCallbackFn } from './as-callback.ts' import { fsp } from './async.ts' -import type { RequestOption } from './client.ts' -import { findCallback, uploadStream } from './client.ts' import { CopyConditions } from './copyConditions.ts' import * as errors from './errors.ts' import type { MetaData } from './helpers.ts' @@ -55,6 +53,8 @@ import type { UploadedObjectInfo, } from './type.ts' import { TypedClient } from './typed-client.ts' +import type { RequestOption } from './typedBase.ts' +import { findCallback, uploadStream } from './typedBase.ts' import type { ObjectLockConfig, S3ListObject } from './xml-parsers.ts' import * as xmlParsers from './xml-parsers.ts' @@ -66,7 +66,7 @@ type PartConfig = { headers: RequestHeaders } -export class TypedClient2 extends TypedClient { +export class Client extends TypedClient { // * `callback(err, {etag, lastModified})` _function_: non null `err` indicates error, `etag` _string_ and `listModifed` _Date_ are respectively the etag and the last modified date of the newly copied object protected copyObjectV1( bucketName: string, diff --git a/src/client.ts b/src/typedBase.ts similarity index 99% rename from src/client.ts rename to src/typedBase.ts index 64361ef5..80bddbba 100644 --- a/src/client.ts +++ b/src/typedBase.ts @@ -134,7 +134,7 @@ export function findCallback(args: u return [args.slice(0, index) as A, args[index] as T] } -export class Client { +export class TypedBase { protected transport: typeof http | typeof https protected host: string protected port: number @@ -1982,7 +1982,7 @@ export async function uploadStream({ stream: source, partSize, }: { - client: Client + client: TypedBase bucketName: string objectName: string metaData: MetaData diff --git a/src/upload.ts b/src/upload.ts deleted file mode 100644 index e69de29b..00000000 From 97bd96b1aaa5034efd9cd21dced54c0c1f778288 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 22:09:02 +0800 Subject: [PATCH 18/78] assert --- 
package-lock.json | 14 ++ package.json | 1 + src/as-callback.ts | 2 +- src/assert.ts | 80 +++++++++++ src/extensions.ts | 3 +- src/helpers.ts | 190 ++++++++++++--------------- src/postPolicy.ts | 3 +- src/s3-endpoints.ts | 2 +- src/signing.ts | 5 +- src/transformers.ts | 2 +- src/typed-client.ts | 17 +-- src/typed-client2.ts | 23 ++-- src/typedBase.ts | 21 +-- src/xml-parsers.ts | 2 +- tests/functional/functional-tests.js | 8 +- tests/unit/test.js | 3 +- 16 files changed, 231 insertions(+), 145 deletions(-) create mode 100644 src/assert.ts diff --git a/package-lock.json b/package-lock.json index 40cf1ea4..914b42a8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -20,6 +20,7 @@ "mime-types": "^2.1.14", "mkdirp": "^3.0.1", "query-string": "^7.1.1", + "stream-json": "^1.7.5", "through2": "^4.0.2", "web-encoding": "^1.1.5", "xml": "^1.0.0", @@ -7177,6 +7178,19 @@ "node": ">=6" } }, + "node_modules/stream-chain": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/stream-chain/-/stream-chain-2.2.5.tgz", + "integrity": "sha512-1TJmBx6aSWqZ4tx7aTpBDXK0/e2hhcNSTV8+CbFJtDjbb+I1mZ8lHit0Grw9GRT+6JbIrrDd8esncgBi8aBXGA==" + }, + "node_modules/stream-json": { + "version": "1.7.5", + "resolved": "https://registry.npmjs.org/stream-json/-/stream-json-1.7.5.tgz", + "integrity": "sha512-NSkoVduGakxZ8a+pTPUlcGEeAGQpWL9rKJhOFCV+J/QtdQUEU5vtBgVg6eJXn8JB8RZvpbJWZGvXkhz70MLWoA==", + "dependencies": { + "stream-chain": "^2.2.5" + } + }, "node_modules/strict-uri-encode": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz", diff --git a/package.json b/package.json index a6ba156c..4f274234 100644 --- a/package.json +++ b/package.json @@ -94,6 +94,7 @@ "mime-types": "^2.1.14", "mkdirp": "^3.0.1", "query-string": "^7.1.1", + "stream-json": "^1.7.5", "through2": "^4.0.2", "web-encoding": "^1.1.5", "xml": "^1.0.0", diff --git a/src/as-callback.ts b/src/as-callback.ts index a829d8b3..c67b94a0 100644 --- 
a/src/as-callback.ts +++ b/src/as-callback.ts @@ -1,4 +1,4 @@ -import { isFunction } from './helpers.ts' +import { isFunction } from './assert.ts' export function asCallback( cb: undefined | ((err: unknown | null, result: T) => void), diff --git a/src/assert.ts b/src/assert.ts new file mode 100644 index 00000000..521f94dd --- /dev/null +++ b/src/assert.ts @@ -0,0 +1,80 @@ +/** + * @internal + * + * assert js types + * + */ +import type * as stream from 'node:stream' + +import _ from 'lodash' + +/** + * check if typeof arg number + */ +export function isNumber(arg: unknown): arg is number { + return typeof arg === 'number' +} + +export type AnyFunction = (...args: any[]) => any + +/** + * check if typeof arg function + */ +export function isFunction(arg: unknown): arg is AnyFunction { + return typeof arg === 'function' +} + +/** + * check if typeof arg function or undefined + */ +export function isOptionalFunction(arg: unknown): arg is undefined | AnyFunction { + if (arg === undefined) { + return true + } + return typeof arg === 'function' +} + +/** + * check if typeof arg string + */ +export function isString(arg: unknown): arg is string { + return typeof arg === 'string' +} + +/** + * check if typeof arg object + */ +export function isObject(arg: unknown): arg is object { + return typeof arg === 'object' && arg !== null +} + +/** + * check if object is readable stream + */ +export function isReadableStream(arg: unknown): arg is stream.Readable { + // eslint-disable-next-line @typescript-eslint/unbound-method + return isObject(arg) && isFunction((arg as stream.Readable)._read) +} + +/** + * check if arg is boolean + */ +export function isBoolean(arg: unknown): arg is boolean { + return typeof arg === 'boolean' +} + +export function isEmpty(o: unknown): o is null | undefined { + return _.isEmpty(o) +} + +export function isEmptyObject(o: Record): boolean { + return Object.values(o).filter((x) => x !== undefined).length !== 0 +} + +/** + * check if arg is a valid date 
+ */ +export function isValidDate(arg: unknown): arg is Date { + // @ts-expect-error TS(2345): Argument of type 'Date' is not assignable to param... Remove this comment to see the full error message + return arg instanceof Date && !isNaN(arg) +} diff --git a/src/extensions.ts b/src/extensions.ts index eccfd99e..e38f1f4c 100644 --- a/src/extensions.ts +++ b/src/extensions.ts @@ -16,8 +16,9 @@ import * as stream from 'node:stream' +import { isBoolean, isNumber, isString } from './assert.ts' import * as errors from './errors.ts' -import { isBoolean, isNumber, isString, isValidBucketName, isValidPrefix, pipesetup, uriEscape } from './helpers.ts' +import { isValidBucketName, isValidPrefix, pipesetup, uriEscape } from './helpers.ts' import * as transformers from './transformers.ts' import type { Client } from './typed-client2.ts' diff --git a/src/helpers.ts b/src/helpers.ts index 35dae480..ccc309ee 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -26,6 +26,7 @@ import ipaddr from 'ipaddr.js' import _ from 'lodash' import mime from 'mime-types' +import { isEmpty, isEmptyObject, isNumber, isObject, isString } from './assert.ts' import * as errors from './errors.ts' import { qs } from './qs.ts' import type { Binary, Mode } from './type.ts' @@ -33,9 +34,11 @@ import type { Binary, Mode } from './type.ts' export type MetaData = Record export type Header = Record -// All characters in string which are NOT unreserved should be percent encoded. -// Unreserved characers are : ALPHA / DIGIT / "-" / "." / "_" / "~" -// Reference https://tools.ietf.org/html/rfc3986#section-2.2 +/** + * All characters in string which are NOT unreserved should be percent encoded. + * Unreserved characters are : ALPHA / DIGIT / "-" / "." 
/ "_" / "~" + * Reference https://tools.ietf.org/html/rfc3986#section-2.2 + */ export function uriEscape(string: string) { return string.split('').reduce((acc: string, elem: string) => { const buf = Buffer.from(elem) @@ -73,16 +76,20 @@ export function getScope(region: string, date: Date, serviceName = 's3') { return `${makeDateShort(date)}/${region}/${serviceName}/aws4_request` } -// isAmazonEndpoint - true if endpoint is 's3.amazonaws.com' or 's3.cn-north-1.amazonaws.com.cn' +/** + * isAmazonEndpoint - true if endpoint is 's3.amazonaws.com' or 's3.cn-north-1.amazonaws.com.cn' + */ export function isAmazonEndpoint(endpoint: string) { return endpoint === 's3.amazonaws.com' || endpoint === 's3.cn-north-1.amazonaws.com.cn' } -// isVirtualHostStyle - verify if bucket name is support with virtual -// hosts. bucketNames with periods should be always treated as path -// style if the protocol is 'https:', this is due to SSL wildcard -// limitation. For all other buckets and Amazon S3 endpoint we will -// default to virtual host style. +/** + * isVirtualHostStyle - verify if bucket name is support with virtual + * hosts. bucketNames with periods should be always treated as path + * style if the protocol is 'https:', this is due to SSL wildcard + * limitation. For all other buckets and Amazon S3 endpoint we will + * default to virtual host style. + */ export function isVirtualHostStyle(endpoint: string, protocol: string, bucket: string, pathStyle: boolean) { if (protocol === 'https:' && bucket.includes('.')) { return false @@ -94,12 +101,16 @@ export function isValidIP(ip: string) { return ipaddr.isValid(ip) } -// isValidEndpoint - true if endpoint is valid domain. +/** + * @returns if endpoint is valid domain. + */ export function isValidEndpoint(endpoint: string) { return isValidDomain(endpoint) || isValidIP(endpoint) } -// isValidDomain - true if input host is a valid domain. +/** + * @returns if input host is a valid domain. 
+ */ export function isValidDomain(host: string) { if (!isString(host)) { return false @@ -132,8 +143,15 @@ export function isValidDomain(host: string) { return true } -// Probes contentType using file extensions. -// For example: probeContentType('file.png') returns 'image/png'. +/** + * Probes contentType using file extensions. + * + * @example + * ``` + * // return 'image/png' + * probeContentType('file.png') + * ``` + */ export function probeContentType(path: string) { let contentType = mime.lookup(path) if (!contentType) { @@ -142,7 +160,9 @@ export function probeContentType(path: string) { return contentType } -// isValidPort - is input port valid. +/** + * is input port valid. + */ export function isValidPort(port: unknown): port is number { // verify if port is a number. if (!isNumber(port)) { @@ -177,29 +197,31 @@ export function isValidBucketName(bucket: unknown) { return false } // bucket cannot have ip address style. - if (bucket.match(/[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+/)) { + if (/[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+/.test(bucket)) { return false } // bucket should begin with alphabet/number and end with alphabet/number, // with alphabet/number/.- in the middle. 
- if (bucket.match(/^[a-z0-9][a-z0-9.-]+[a-z0-9]$/)) { + if (/^[a-z0-9][a-z0-9.-]+[a-z0-9]$/.test(bucket)) { return true } return false } -// check if objectName is a valid object name +/** + * check if objectName is a valid object name + */ export function isValidObjectName(objectName: unknown) { if (!isValidPrefix(objectName)) { return false } - if (objectName.length === 0) { - return false - } - return true + + return objectName.length !== 0 } -// check if prefix is valid +/** + * check if prefix is valid + */ export function isValidPrefix(prefix: unknown): prefix is string { if (!isString(prefix)) { return false @@ -210,68 +232,9 @@ export function isValidPrefix(prefix: unknown): prefix is string { return true } -// check if typeof arg number -export function isNumber(arg: unknown): arg is number { - return typeof arg === 'number' -} - -export type AnyFunction = (...args: any[]) => any - -// check if typeof arg function -export function isFunction(arg: unknown): arg is AnyFunction { - return typeof arg === 'function' -} - -// check if typeof arg function or undefined -export function isOptionalFunction(arg: unknown): arg is undefined | AnyFunction { - if (arg === undefined) { - return true - } - return typeof arg === 'function' -} - -// check if typeof arg string -export function isString(arg: unknown): arg is string { - return typeof arg === 'string' -} - -// check if typeof arg object -export function isObject(arg: unknown): arg is object { - return typeof arg === 'object' && arg !== null -} - -// check if object is readable stream -export function isReadableStream(arg: unknown): arg is stream.Readable { - // eslint-disable-next-line @typescript-eslint/unbound-method - return isObject(arg) && isFunction((arg as stream.Readable)._read) -} - -// check if arg is boolean -export function isBoolean(arg: unknown): arg is boolean { - return typeof arg === 'boolean' -} - -// check if arg is array -export function isArray(arg: unknown): arg is Array { - return 
Array.isArray(arg) -} - -export function isEmpty(o: unknown): o is null | undefined { - return _.isEmpty(o) -} - -export function isEmptyObject(o: Record): boolean { - return Object.values(o).filter((x) => x !== undefined).length !== 0 -} - -// check if arg is a valid date -export function isValidDate(arg: unknown): arg is Date { - // @ts-expect-error TS(2345): Argument of type 'Date' is not assignable to param... Remove this comment to see the full error message - return arg instanceof Date && !isNaN(arg) -} - -// Create a Date string with format: -// 'YYYYMMDDTHHmmss' + Z +/** + * Create a Date string with format: 'YYYYMMDDTHHmmss' + Z + */ export function makeDateLong(date?: Date): string { date = date || new Date() @@ -281,8 +244,9 @@ export function makeDateLong(date?: Date): string { return s.slice(0, 4) + s.slice(5, 7) + s.slice(8, 13) + s.slice(14, 16) + s.slice(17, 19) + 'Z' } -// Create a Date string with format: -// 'YYYYMMDD' +/** + * Create a Date string with format: 'YYYYMMDD' + */ export function makeDateShort(date?: Date) { date = date || new Date() @@ -292,15 +256,19 @@ export function makeDateShort(date?: Date) { return s.slice(0, 4) + s.slice(5, 7) + s.slice(8, 10) } -// pipesetup sets up pipe() from left to right os streams array -// pipesetup will also make sure that error emitted at any of the upstream Stream -// will be emitted at the last stream. This makes error handling simple +/** + * pipesetup sets up pipe() from left to right os streams array + * pipesetup will also make sure that error emitted at any of the upstream Stream + * will be emitted at the last stream. 
This makes error handling simple + */ export function pipesetup(src: stream.Readable, dst: stream.Writable) { src.on('error', (err: unknown) => dst.emit('error', err)) return src.pipe(dst) } -// return a Readable stream that emits data +/** + * return a Readable stream that emits data + */ export function readableStream(data: unknown): stream.Readable { const s = new stream.Readable() s._read = () => {} @@ -309,7 +277,9 @@ export function readableStream(data: unknown): stream.Readable { return s } -// Process metadata to insert appropriate value to `content-type` attribute +/** + * Process metadata to insert appropriate value to `content-type` attribute + */ export function insertContentType(metaData: MetaData, filePath: string) { // check if content-type attribute present in metaData for (const key in metaData) { @@ -324,7 +294,9 @@ export function insertContentType(metaData: MetaData, filePath: string) { return newMetadata } -// Function prepends metadata with the appropriate prefix if it is not already on +/** + * Function prepends metadata with the appropriate prefix if it is not already on + */ export function prependXAMZMeta(metaData?: MetaData) { if (!metaData) { return {} @@ -340,7 +312,9 @@ export function prependXAMZMeta(metaData?: MetaData) { return newMetadata } -// Checks if it is a valid header according to the AmazonS3 API +/** + * Checks if it is a valid header according to the AmazonS3 API + */ export function isAmzHeader(key: string) { const temp = key.toLowerCase() return ( @@ -351,7 +325,9 @@ export function isAmzHeader(key: string) { ) } -// Checks if it is a supported Header +/** + * Checks if it is a supported Header + */ export function isSupportedHeader(key: string) { const supported_headers = [ 'content-type', @@ -364,7 +340,9 @@ export function isSupportedHeader(key: string) { return supported_headers.includes(key.toLowerCase()) } -// Checks if it is a storage header +/** + * Checks if it is a storage header + */ export function 
isStorageClassHeader(key: string) { return key.toLowerCase() === 'x-amz-storage-class' } @@ -433,9 +411,11 @@ export function toSha256(payload: Binary | Uint8Array): string { return crypto.createHash('sha256').update(payload).digest('hex') } -// toArray returns a single element array with param being the element, -// if param is just a string, and returns 'param' back if it is an array -// So, it makes sure param is always an array +/** + * toArray returns a single element array with param being the element, + * if param is just a string, and returns 'param' back if it is an array + * So, it makes sure param is always an array + */ export function toArray(param: T | T[]): Array { if (!Array.isArray(param)) { return [param] as T[] @@ -745,10 +725,12 @@ export function partsRequired(size: number): number { return requiredPartSize } -// calculateEvenSplits - computes splits for a source and returns -// start and end index slices. Splits happen evenly to be sure that no -// part is less than 5MiB, as that could fail the multipart request if -// it is not the last part. +/** + * calculateEvenSplits - computes splits for a source and returns + * start and end index slices. Splits happen evenly to be sure that no + * part is less than 5MiB, as that could fail the multipart request if + * it is not the last part. 
+ */ export function calculateEvenSplits(size: number, objInfo: { Start?: unknown; Bucket: string; Object: string }) { if (size === 0) { return null @@ -818,7 +800,9 @@ export function parseXml(xml: string): any { return result } -// maybe this should be a generic type for Records, leave it for later refactor +/** + * maybe this should be a generic type for Records, leave it for later refactor + */ export class SelectResults { private records?: unknown private response?: unknown diff --git a/src/postPolicy.ts b/src/postPolicy.ts index a1092e50..ef47d1b7 100644 --- a/src/postPolicy.ts +++ b/src/postPolicy.ts @@ -1,7 +1,8 @@ // Build PostPolicy object that can be signed by presignedPostPolicy +import { isObject } from './assert.ts' import * as errors from './errors.ts' import type { MetaData } from './helpers.ts' -import { isObject, isValidBucketName, isValidObjectName, isValidPrefix } from './helpers.ts' +import { isValidBucketName, isValidObjectName, isValidPrefix } from './helpers.ts' export class PostPolicy { public policy: { conditions: (string | number)[][]; expiration?: string } diff --git a/src/s3-endpoints.ts b/src/s3-endpoints.ts index a3f20e68..5d88fc4b 100644 --- a/src/s3-endpoints.ts +++ b/src/s3-endpoints.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { isString } from './helpers.ts' +import { isString } from './assert.ts' // List of currently supported endpoints. 
const awsS3Endpoint = { diff --git a/src/signing.ts b/src/signing.ts index 758cec10..b1e9b559 100644 --- a/src/signing.ts +++ b/src/signing.ts @@ -16,8 +16,9 @@ import * as Crypto from 'node:crypto' +import { isNumber, isObject, isString } from './assert.ts' import * as errors from './errors.ts' -import { getScope, isArray, isNumber, isObject, isString, makeDateLong, makeDateShort, uriEscape } from './helpers.ts' +import { getScope, makeDateLong, makeDateShort, uriEscape } from './helpers.ts' import type { ICanonicalRequest, IRequest, RequestHeaders } from './type.ts' const signV4Algorithm = 'AWS4-HMAC-SHA256' @@ -48,7 +49,7 @@ function getCanonicalRequest( if (!isObject(headers)) { throw new TypeError('headers should be of type "object"') } - if (!isArray(signedHeaders)) { + if (!Array.isArray(signedHeaders)) { throw new TypeError('signedHeaders should be of type "array"') } if (!isString(hashedPayload)) { diff --git a/src/transformers.ts b/src/transformers.ts index a387b9f8..9af86e81 100644 --- a/src/transformers.ts +++ b/src/transformers.ts @@ -20,8 +20,8 @@ import type * as stream from 'node:stream' import Through2 from 'through2' +import { isFunction } from './assert.ts' import * as errors from './errors.ts' -import { isFunction } from './helpers.ts' import * as xmlParsers from './xml-parsers.ts' // getConcater returns a stream that concatenates the input and emits diff --git a/src/typed-client.ts b/src/typed-client.ts index 3992b7b9..c8f37691 100644 --- a/src/typed-client.ts +++ b/src/typed-client.ts @@ -4,13 +4,7 @@ import { TextEncoder } from 'web-encoding' import xml2js from 'xml2js' import { asCallback, asCallbackFn } from './as-callback.ts' -import { fsp } from './async.ts' -import * as errors from './errors.ts' -import type { MetaData, SelectResults } from './helpers.ts' import { - getScope, - insertContentType, - isArray, isBoolean, isEmpty, isFunction, @@ -18,8 +12,15 @@ import { isObject, isOptionalFunction, isString, - isValidBucketName, 
isValidDate, +} from './assert.ts' +import { fsp } from './async.ts' +import * as errors from './errors.ts' +import type { MetaData, SelectResults } from './helpers.ts' +import { + getScope, + insertContentType, + isValidBucketName, isValidObjectName, isValidPrefix, LEGAL_HOLD_STATUS, @@ -1512,7 +1513,7 @@ export class TypedClient extends TypedBase { if (!isValidBucketName(bucketName)) { throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) } - if (!isArray(objectsList)) { + if (!Array.isArray(objectsList)) { throw new errors.InvalidArgumentError('objectsList should be a list') } if (!isOptionalFunction(cb)) { diff --git a/src/typed-client2.ts b/src/typed-client2.ts index 56cbd58b..38ea8f80 100644 --- a/src/typed-client2.ts +++ b/src/typed-client2.ts @@ -5,6 +5,16 @@ import _ from 'lodash' import xml2js from 'xml2js' import { asCallback, asCallbackFn } from './as-callback.ts' +import { + isBoolean, + isEmpty, + isFunction, + isNumber, + isObject, + isOptionalFunction, + isReadableStream, + isString, +} from './assert.ts' import { fsp } from './async.ts' import { CopyConditions } from './copyConditions.ts' import * as errors from './errors.ts' @@ -16,15 +26,6 @@ import { extractMetadata, getSourceVersionId, getVersionId, - isArray, - isBoolean, - isEmpty, - isFunction, - isNumber, - isObject, - isOptionalFunction, - isReadableStream, - isString, isValidBucketName, isValidObjectName, isValidPrefix, @@ -242,7 +243,7 @@ export class Client extends TypedClient { const me = this // many async flows. so store the ref. 
const sourceFilesLength = sourceObjList.length - if (!isArray(sourceObjList)) { + if (!Array.isArray(sourceObjList)) { throw new errors.InvalidArgumentError('sourceConfig should an array of CopySourceOptions ') } if (!(destObjConfig instanceof CopyDestinationOptions)) { @@ -636,7 +637,7 @@ export class Client extends TypedClient { if (!isString(suffix)) { throw new TypeError('suffix must be of type string') } - if (!isArray(events)) { + if (!Array.isArray(events)) { throw new TypeError('events must be of type Array') } const listener = new NotificationPoller(this, bucketName, prefix, suffix, events) diff --git a/src/typedBase.ts b/src/typedBase.ts index 80bddbba..0c79a51d 100644 --- a/src/typedBase.ts +++ b/src/typedBase.ts @@ -14,26 +14,29 @@ import { mkdirp } from 'mkdirp' import xml2js from 'xml2js' import { asCallback, asCallbackFn } from './as-callback.ts' +import type { AnyFunction } from './assert.ts' +import { + isBoolean, + isEmpty, + isFunction, + isNumber, + isObject, + isOptionalFunction, + isReadableStream, + isString, +} from './assert.ts' import { fsp, streamPromise } from './async.ts' import { CredentialProvider } from './CredentialProvider.ts' import * as errors from './errors.ts' import { S3Error } from './errors.ts' import { extensions } from './extensions.ts' -import type { AnyFunction, MetaData } from './helpers.ts' +import type { MetaData } from './helpers.ts' import { DEFAULT_REGION, extractMetadata, getVersionId, insertContentType, isAmazonEndpoint, - isBoolean, - isEmpty, - isFunction, - isNumber, - isObject, - isOptionalFunction, - isReadableStream, - isString, isValidBucketName, isValidEndpoint, isValidObjectName, diff --git a/src/xml-parsers.ts b/src/xml-parsers.ts index c1ae330e..31817417 100644 --- a/src/xml-parsers.ts +++ b/src/xml-parsers.ts @@ -17,10 +17,10 @@ import * as newCrc32 from 'crc-32' import { XMLParser } from 'fast-xml-parser' +import { isObject } from './assert.ts' import * as errors from './errors.ts' import type { 
MetaData, RETENTION_MODES } from './helpers.ts' import { - isObject, parseXml, RETENTION_VALIDITY_UNITS, sanitizeETag, diff --git a/tests/functional/functional-tests.js b/tests/functional/functional-tests.js index a27b5ebd..166febf1 100644 --- a/tests/functional/functional-tests.js +++ b/tests/functional/functional-tests.js @@ -31,7 +31,7 @@ import superagent from 'superagent' import * as uuid from 'uuid' import { AssumeRoleProvider } from '../../src/AssumeRoleProvider.ts' -import { CopyDestinationOptions, CopySourceOptions, DEFAULT_REGION, getVersionId, isArray } from '../../src/helpers.ts' +import { CopyDestinationOptions, CopySourceOptions, DEFAULT_REGION, getVersionId } from '../../src/helpers.ts' import { removeDirAndFiles } from '../../src/helpers.ts' import * as minio from '../../src/minio.ts' @@ -2453,7 +2453,7 @@ describe('functional tests', function () { if (err) { return done(err) } - if (isArray(tagList)) { + if (Array.isArray(tagList)) { done() } }) @@ -2513,7 +2513,7 @@ describe('functional tests', function () { if (err) { return done(err) } - if (isArray(tagList)) { + if (Array.isArray(tagList)) { done() } }) @@ -2610,7 +2610,7 @@ describe('functional tests', function () { if (err) { return done(err) } - if (isArray(tagList)) { + if (Array.isArray(tagList)) { done() } }) diff --git a/tests/unit/test.js b/tests/unit/test.js index e540fd12..c48ee83d 100644 --- a/tests/unit/test.js +++ b/tests/unit/test.js @@ -23,7 +23,6 @@ import { calculateEvenSplits, CopyDestinationOptions, CopySourceOptions, - isArray, isValidEndpoint, isValidIP, makeDateLong, @@ -174,7 +173,7 @@ describe('Helpers', () => { const fnResult = calculateEvenSplits(testCase.size, testCase) const { startIndex, endIndex } = fnResult || {} - if (isArray(startIndex) && isArray(endIndex)) { + if (Array.isArray(startIndex) && Array.isArray(endIndex)) { const isExpectedResult = startIndex.length === testCase.expectedStart.length && endIndex.length === testCase.expectedEnd.length 
assert.equal(isExpectedResult, true) From 4dd8ae44303ba320f78d1e438ea521f2e591ea03 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 22:30:13 +0800 Subject: [PATCH 19/78] refactor --- src/AssumeRoleProvider.ts | 9 +++------ src/CredentialProvider.ts | 10 +--------- src/Credentials.ts | 18 +++++------------- src/helpers.ts | 15 +++------------ src/signing.ts | 2 +- src/typedBase.ts | 28 +++++++++------------------- 6 files changed, 22 insertions(+), 60 deletions(-) diff --git a/src/AssumeRoleProvider.ts b/src/AssumeRoleProvider.ts index 6d047563..f0bc84d2 100644 --- a/src/AssumeRoleProvider.ts +++ b/src/AssumeRoleProvider.ts @@ -79,7 +79,7 @@ export class AssumeRoleProvider extends CredentialProvider { action?: string transportAgent?: http.Agent }) { - super({}) + super({ accessKey, secretKey, sessionToken }) this.stsEndpoint = stsEndpoint this.accessKey = accessKey @@ -226,11 +226,8 @@ export class AssumeRoleProvider extends CredentialProvider { this.accessExpiresAt = expiresAt - const newCreds = new Credentials({ - accessKey, - secretKey, - sessionToken, - }) + // @ts-expect-error not sure if this could be undefined + const newCreds = new Credentials({ accessKey, secretKey, sessionToken }) this.setCredentials(newCreds) return this._credentials diff --git a/src/CredentialProvider.ts b/src/CredentialProvider.ts index 99aaebb0..ba4df896 100644 --- a/src/CredentialProvider.ts +++ b/src/CredentialProvider.ts @@ -3,15 +3,7 @@ import { Credentials } from './Credentials.ts' export class CredentialProvider { private credentials: Credentials - constructor({ - accessKey, - secretKey, - sessionToken, - }: { - accessKey?: string - secretKey?: string - sessionToken?: string - }) { + constructor({ accessKey, secretKey, sessionToken }: { accessKey: string; secretKey: string; sessionToken: string }) { this.credentials = new Credentials({ accessKey, secretKey, diff --git a/src/Credentials.ts b/src/Credentials.ts index 78e07388..48819c73 100644 --- a/src/Credentials.ts +++ 
b/src/Credentials.ts @@ -1,17 +1,9 @@ export class Credentials { - public accessKey?: string - public secretKey?: string - public sessionToken?: string - - constructor({ - accessKey, - secretKey, - sessionToken, - }: { - accessKey?: string - secretKey?: string - sessionToken?: string - }) { + public accessKey: string + public secretKey: string + public sessionToken: string + + constructor({ accessKey, secretKey, sessionToken }: { accessKey: string; secretKey: string; sessionToken: string }) { this.accessKey = accessKey this.secretKey = secretKey this.sessionToken = sessionToken diff --git a/src/helpers.ts b/src/helpers.ts index ccc309ee..cfa635d9 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -168,18 +168,9 @@ export function isValidPort(port: unknown): port is number { if (!isNumber(port)) { return false } - // port cannot be negative. - if (port < 0) { - return false - } - // port '0' is valid and special case return true. - if (port === 0) { - return true - } - const min_port = 1 - const max_port = 65535 - // Verify if port is in range. 
- return port >= min_port && port <= max_port + + // port `0` is valid and special case + return 0 <= port && port <= 65535 } export function isValidBucketName(bucket: unknown) { diff --git a/src/signing.ts b/src/signing.ts index b1e9b559..ceeb3d6c 100644 --- a/src/signing.ts +++ b/src/signing.ts @@ -295,7 +295,7 @@ export function presignSignatureV4( const credential = getCredential(accessKey, region, requestDate) const hashedPayload = 'UNSIGNED-PAYLOAD' - const requestQuery = [] + const requestQuery: string[] = [] requestQuery.push(`X-Amz-Algorithm=${signV4Algorithm}`) requestQuery.push(`X-Amz-Credential=${uriEscape(credential)}`) requestQuery.push(`X-Amz-Date=${iso8601Date}`) diff --git a/src/typedBase.ts b/src/typedBase.ts index 0c79a51d..9313574f 100644 --- a/src/typedBase.ts +++ b/src/typedBase.ts @@ -251,9 +251,6 @@ export class TypedBase { this.host = host this.port = port this.protocol = protocol - this.accessKey = params.accessKey - this.secretKey = params.secretKey - this.sessionToken = params.sessionToken this.userAgent = `${libraryAgent}` // Default path style is true @@ -263,12 +260,9 @@ export class TypedBase { this.pathStyle = params.pathStyle } - if (!this.accessKey) { - this.accessKey = '' - } - if (!this.secretKey) { - this.secretKey = '' - } + this.accessKey = params.accessKey ?? '' + this.secretKey = params.secretKey ?? 
'' + this.sessionToken = params.sessionToken this.anonymous = !this.accessKey || !this.secretKey if (params.credentialsProvider) { @@ -1553,17 +1547,13 @@ export class TypedBase { private async fetchCredentials() { if (this.credentialsProvider) { - const credentialsConf = await this.credentialsProvider.getCredentials() - if (credentialsConf) { - // @ts-expect-error secretKey maybe undefined - this.accessKey = credentialsConf.getAccessKey() - // @ts-expect-error secretKey maybe undefined - this.secretKey = credentialsConf.getSecretKey() - this.sessionToken = credentialsConf.getSessionToken() + const credential = await this.credentialsProvider.getCredentials() + if (credential) { + this.accessKey = credential.getAccessKey() + this.secretKey = credential.getSecretKey() + this.sessionToken = credential.getSessionToken() } else { - throw new Error( - `Unable to get credentials. Expected instance of BaseCredentialsProvider, get ${credentialsConf}`, - ) + throw new Error(`Unable to get credentials. Expected instance of BaseCredentialsProvider, get ${credential}`) } } else { throw new Error('Unable to get credentials. 
Expected instance of BaseCredentialsProvider') From 8df43cdbe93c2206ccb60c4ddf654470b44c148e Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 22:34:50 +0800 Subject: [PATCH 20/78] refactor --- src/AssumeRoleProvider.ts | 35 +++++++++++++++++------------------ 1 file changed, 17 insertions(+), 18 deletions(-) diff --git a/src/AssumeRoleProvider.ts b/src/AssumeRoleProvider.ts index f0bc84d2..3836ee47 100644 --- a/src/AssumeRoleProvider.ts +++ b/src/AssumeRoleProvider.ts @@ -16,13 +16,13 @@ type CredentialResponse = { } } - AssumeRoleResponse?: { - AssumeRoleResult?: { - Credentials?: { - AccessKeyId: string | undefined - SecretAccessKey: string | undefined - SessionToken: string | undefined - Expiration: string | undefined + AssumeRoleResponse: { + AssumeRoleResult: { + Credentials: { + AccessKeyId: string + SecretAccessKey: string + SessionToken: string + Expiration: string } } } @@ -203,7 +203,7 @@ export class AssumeRoleProvider extends CredentialProvider { return parseXml(body) } - parseCredentials(respObj: CredentialResponse = {}) { + parseCredentials(respObj: CredentialResponse) { if (respObj.ErrorResponse) { throw new Error( `Unable to obtain credentials: ${respObj.ErrorResponse?.Error?.Code} ${respObj.ErrorResponse?.Error?.Message}`, @@ -215,21 +215,20 @@ export class AssumeRoleProvider extends CredentialProvider { AssumeRoleResponse: { AssumeRoleResult: { Credentials: { - AccessKeyId: accessKey = undefined, - SecretAccessKey: secretKey = undefined, - SessionToken: sessionToken = undefined, - Expiration: expiresAt = null, - } = {}, - } = {}, - } = {}, + AccessKeyId: accessKey, + SecretAccessKey: secretKey, + SessionToken: sessionToken, + Expiration: expiresAt, + }, + }, + }, } = respObj this.accessExpiresAt = expiresAt - // @ts-expect-error not sure if this could be undefined - const newCreds = new Credentials({ accessKey, secretKey, sessionToken }) + const credentials = new Credentials({ accessKey, secretKey, sessionToken }) - 
this.setCredentials(newCreds) + this.setCredentials(credentials) return this._credentials } From f6551a65ce9bbb50d9d6ce840d6719093615c129 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 2 May 2023 22:35:16 +0800 Subject: [PATCH 21/78] set readonly --- src/AssumeRoleProvider.ts | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/src/AssumeRoleProvider.ts b/src/AssumeRoleProvider.ts index 3836ee47..fe29d864 100644 --- a/src/AssumeRoleProvider.ts +++ b/src/AssumeRoleProvider.ts @@ -29,24 +29,24 @@ type CredentialResponse = { } export class AssumeRoleProvider extends CredentialProvider { - private stsEndpoint: string - private accessKey: string - private secretKey: string - private durationSeconds: number + private readonly stsEndpoint: string + private readonly accessKey: string + private readonly secretKey: string + private readonly durationSeconds: number private sessionToken: string - private policy: string - private region: string - private roleArn: string - private roleSessionName: string - private externalId: string - private token: string - private webIdentityToken: string - private action: string + private readonly policy: string + private readonly region: string + private readonly roleArn: string + private readonly roleSessionName: string + private readonly externalId: string + private readonly token: string + private readonly webIdentityToken: string + private readonly action: string private _credentials: Credentials | null private expirySeconds: number | null private accessExpiresAt: string | null - private transportAgent?: http.Agent + private readonly transportAgent?: http.Agent constructor({ stsEndpoint, From 21e070a2b00b9330c3fea0f08b8f08636072549f Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 14:47:13 +0800 Subject: [PATCH 22/78] migrate helpers.js to ts --- src/AssumeRoleProvider.js | 2 +- src/base-error.ts | 30 -- src/errors.ts | 15 +- src/extensions.js | 2 +- src/{helpers.js => helpers.ts} | 
637 +++++++++++++++------------ src/minio.js | 13 +- src/notification.js | 2 +- src/object-uploader.js | 2 +- src/promisify.js | 31 ++ src/qs.ts | 7 + src/s3-endpoints.js | 2 +- src/signing.js | 4 +- src/transformers.js | 2 +- src/type.ts | 3 + src/xml-parsers.js | 2 +- tests/functional/functional-tests.js | 9 +- tests/unit/test.js | 5 +- 17 files changed, 441 insertions(+), 327 deletions(-) delete mode 100644 src/base-error.ts rename src/{helpers.js => helpers.ts} (50%) create mode 100644 src/promisify.js create mode 100644 src/qs.ts create mode 100644 src/type.ts diff --git a/src/AssumeRoleProvider.js b/src/AssumeRoleProvider.js index 25411f85..3a3b343b 100644 --- a/src/AssumeRoleProvider.js +++ b/src/AssumeRoleProvider.js @@ -4,7 +4,7 @@ import { URL, URLSearchParams } from 'node:url' import { CredentialProvider } from './CredentialProvider.js' import { Credentials } from './Credentials.js' -import { makeDateLong, parseXml, toSha256 } from './helpers.js' +import { makeDateLong, parseXml, toSha256 } from './helpers.ts' import { signV4ByServiceName } from './signing.js' export class AssumeRoleProvider extends CredentialProvider { diff --git a/src/base-error.ts b/src/base-error.ts deleted file mode 100644 index d3947b6d..00000000 --- a/src/base-error.ts +++ /dev/null @@ -1,30 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/// - -/** - * @internal - */ -export class ExtendableError extends Error { - constructor(message?: string, opt?: ErrorOptions) { - // error Option {cause?: unknown} is a 'nice to have', - // don't use it internally - super(message, opt) - // set error name, otherwise it's always 'Error' - this.name = this.constructor.name - } -} diff --git a/src/errors.ts b/src/errors.ts index fa6f62fb..adf0bef2 100644 --- a/src/errors.ts +++ b/src/errors.ts @@ -14,7 +14,20 @@ * limitations under the License. */ -import { ExtendableError } from './base-error.ts' +/// + +/** + * @internal + */ +class ExtendableError extends Error { + constructor(message?: string, opt?: ErrorOptions) { + // error Option {cause?: unknown} is a 'nice to have', + // don't use it internally + super(message, opt) + // set error name, otherwise it's always 'Error' + this.name = this.constructor.name + } +} /** * AnonymousRequestError is generated for anonymous keys on specific diff --git a/src/extensions.js b/src/extensions.js index 5e04a930..a81a6222 100644 --- a/src/extensions.js +++ b/src/extensions.js @@ -17,7 +17,7 @@ import * as Stream from 'node:stream' import * as errors from './errors.ts' -import { isBoolean, isNumber, isString, isValidBucketName, isValidPrefix, pipesetup, uriEscape } from './helpers.js' +import { isBoolean, isNumber, isString, isValidBucketName, isValidPrefix, pipesetup, uriEscape } from './helpers.ts' import * as transformers from './transformers.js' export class extensions { diff --git a/src/helpers.js b/src/helpers.ts similarity index 50% rename from src/helpers.js rename to src/helpers.ts index 18091089..cd19539d 100644 --- a/src/helpers.js +++ b/src/helpers.ts @@ -14,9 +14,10 @@ * limitations under the License. 
*/ -import * as Crypto from 'node:crypto' -import * as fs from 'node:fs' -import * as path from 'node:path' +import * as crypto from 'node:crypto' +import fs from 'node:fs' +import type { IncomingHttpHeaders } from 'node:http' +import path from 'node:path' import * as stream from 'node:stream' import { isBrowser } from 'browser-or-node' @@ -24,50 +25,22 @@ import { XMLParser } from 'fast-xml-parser' import ipaddr from 'ipaddr.js' import _ from 'lodash' import mime from 'mime-types' -import querystring from 'query-string' import * as errors from './errors.ts' +import { qs } from './qs.ts' +import type { Binary, Mode } from './type.ts' -const fxp = new XMLParser() - -// Returns a wrapper function that will promisify a given callback function. -// It will preserve 'this'. -export function promisify(fn) { - return function () { - // If the last argument is a function, assume its the callback. - let callback = arguments[arguments.length - 1] - - // If the callback is given, don't promisify, just pass straight in. - if (typeof callback === 'function') { - return fn.apply(this, arguments) - } - - // Otherwise, create a new set of arguments, and wrap - // it in a promise. - let args = [...arguments] - - return new Promise((resolve, reject) => { - // Add the callback function. - args.push((err, value) => { - if (err) { - return reject(err) - } - - resolve(value) - }) +export type MetaData = Record +export type Header = Record - // Call the function with our special adaptor callback added. - fn.apply(this, args) - }) - } -} - -// All characters in string which are NOT unreserved should be percent encoded. -// Unreserved characers are : ALPHA / DIGIT / "-" / "." / "_" / "~" -// Reference https://tools.ietf.org/html/rfc3986#section-2.2 -export function uriEscape(string) { - return string.split('').reduce((acc, elem) => { - let buf = Buffer.from(elem) +/** + * All characters in string which are NOT unreserved should be percent encoded. 
+ * Unreserved characters are : ALPHA / DIGIT / "-" / "." / "_" / "~" + * Reference https://tools.ietf.org/html/rfc3986#section-2.2 + */ +export function uriEscape(string: string) { + return string.split('').reduce((acc: string, elem: string) => { + const buf = Buffer.from(elem) if (buf.length === 1) { // length 1 indicates that elem is not a unicode character. // Check if it is an unreserved characer. @@ -87,49 +60,57 @@ export function uriEscape(string) { } // elem needs encoding - i.e elem should be encoded if it's not unreserved // character or if it's a unicode character. - for (var i = 0; i < buf.length; i++) { - acc = acc + '%' + buf[i].toString(16).toUpperCase() + for (const char of buf) { + acc = acc + '%' + char.toString(16).toUpperCase() } return acc }, '') } -export function uriResourceEscape(string) { +export function uriResourceEscape(string: string) { return uriEscape(string).replace(/%2F/g, '/') } -export function getScope(region, date, serviceName = 's3') { +export function getScope(region: string, date: Date, serviceName = 's3') { return `${makeDateShort(date)}/${region}/${serviceName}/aws4_request` } -// isAmazonEndpoint - true if endpoint is 's3.amazonaws.com' or 's3.cn-north-1.amazonaws.com.cn' -export function isAmazonEndpoint(endpoint) { +/** + * isAmazonEndpoint - true if endpoint is 's3.amazonaws.com' or 's3.cn-north-1.amazonaws.com.cn' + */ +export function isAmazonEndpoint(endpoint: string) { return endpoint === 's3.amazonaws.com' || endpoint === 's3.cn-north-1.amazonaws.com.cn' } -// isVirtualHostStyle - verify if bucket name is support with virtual -// hosts. bucketNames with periods should be always treated as path -// style if the protocol is 'https:', this is due to SSL wildcard -// limitation. For all other buckets and Amazon S3 endpoint we will -// default to virtual host style. 
-export function isVirtualHostStyle(endpoint, protocol, bucket, pathStyle) { - if (protocol === 'https:' && bucket.indexOf('.') > -1) { +/** + * isVirtualHostStyle - verify if bucket name is support with virtual + * hosts. bucketNames with periods should be always treated as path + * style if the protocol is 'https:', this is due to SSL wildcard + * limitation. For all other buckets and Amazon S3 endpoint we will + * default to virtual host style. + */ +export function isVirtualHostStyle(endpoint: string, protocol: string, bucket: string, pathStyle: boolean) { + if (protocol === 'https:' && bucket.includes('.')) { return false } return isAmazonEndpoint(endpoint) || !pathStyle } -export function isValidIP(ip) { +export function isValidIP(ip: string) { return ipaddr.isValid(ip) } -// isValidEndpoint - true if endpoint is valid domain. -export function isValidEndpoint(endpoint) { +/** + * @returns if endpoint is valid domain. + */ +export function isValidEndpoint(endpoint: string) { return isValidDomain(endpoint) || isValidIP(endpoint) } -// isValidDomain - true if input host is a valid domain. -export function isValidDomain(host) { +/** + * @returns if input host is a valid domain. + */ +export function isValidDomain(host: string) { if (!isString(host)) { return false } @@ -149,10 +130,10 @@ export function isValidDomain(host) { if (host[0] === '.') { return false } - var alphaNumerics = '`~!@#$%^&*()+={}[]|\\"\';:> -1) { + for (const char of alphaNumerics) { + if (host.includes(char)) { return false } } @@ -161,9 +142,16 @@ export function isValidDomain(host) { return true } -// Probes contentType using file extensions. -// For example: probeContentType('file.png') returns 'image/png'. -export function probeContentType(path) { +/** + * Probes contentType using file extensions. 
+ * + * @example + * ``` + * // return 'image/png' + * probeContentType('file.png') + * ``` + */ +export function probeContentType(path: string) { let contentType = mime.lookup(path) if (!contentType) { contentType = 'application/octet-stream' @@ -171,27 +159,20 @@ export function probeContentType(path) { return contentType } -// isValidPort - is input port valid. -export function isValidPort(port) { +/** + * is input port valid. + */ +export function isValidPort(port: unknown): port is number { // verify if port is a number. if (!isNumber(port)) { return false } - // port cannot be negative. - if (port < 0) { - return false - } - // port '0' is valid and special case return true. - if (port === 0) { - return true - } - var min_port = 1 - var max_port = 65535 - // Verify if port is in range. - return port >= min_port && port <= max_port + + // port `0` is valid and special case + return 0 <= port && port <= 65535 } -export function isValidBucketName(bucket) { +export function isValidBucketName(bucket: unknown) { if (!isString(bucket)) { return false } @@ -202,34 +183,36 @@ export function isValidBucketName(bucket) { return false } // bucket with successive periods is invalid. - if (bucket.indexOf('..') > -1) { + if (bucket.includes('..')) { return false } // bucket cannot have ip address style. - if (bucket.match(/[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+/)) { + if (/[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+/.test(bucket)) { return false } // bucket should begin with alphabet/number and end with alphabet/number, // with alphabet/number/.- in the middle. 
- if (bucket.match(/^[a-z0-9][a-z0-9.-]+[a-z0-9]$/)) { + if (/^[a-z0-9][a-z0-9.-]+[a-z0-9]$/.test(bucket)) { return true } return false } -// check if objectName is a valid object name -export function isValidObjectName(objectName) { +/** + * check if objectName is a valid object name + */ +export function isValidObjectName(objectName: unknown) { if (!isValidPrefix(objectName)) { return false } - if (objectName.length === 0) { - return false - } - return true + + return objectName.length !== 0 } -// check if prefix is valid -export function isValidPrefix(prefix) { +/** + * check if prefix is valid + */ +export function isValidPrefix(prefix: unknown): prefix is string { if (!isString(prefix)) { return false } @@ -239,117 +222,153 @@ export function isValidPrefix(prefix) { return true } -// check if typeof arg number -export function isNumber(arg) { +/** + * check if typeof arg number + */ +export function isNumber(arg: unknown): arg is number { return typeof arg === 'number' } -// check if typeof arg function -export function isFunction(arg) { +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export type AnyFunction = (...args: any[]) => any + +/** + * check if typeof arg function + */ +export function isFunction(arg: unknown): arg is AnyFunction { return typeof arg === 'function' } -// check if typeof arg string -export function isString(arg) { +/** + * check if typeof arg string + */ +export function isString(arg: unknown): arg is string { return typeof arg === 'string' } -// check if typeof arg object -export function isObject(arg) { +/** + * check if typeof arg object + */ +export function isObject(arg: unknown): arg is object { return typeof arg === 'object' && arg !== null } -// check if object is readable stream -export function isReadableStream(arg) { - return isObject(arg) && isFunction(arg._read) +/** + * check if object is readable stream + */ +export function isReadableStream(arg: unknown): arg is stream.Readable { + // 
eslint-disable-next-line @typescript-eslint/unbound-method + return isObject(arg) && isFunction((arg as stream.Readable)._read) } -// check if arg is boolean -export function isBoolean(arg) { +/** + * check if arg is boolean + */ +export function isBoolean(arg: unknown): arg is boolean { return typeof arg === 'boolean' } -// check if arg is array -export function isArray(arg) { - return Array.isArray(arg) +export function isEmpty(o: unknown): o is null | undefined { + return _.isEmpty(o) +} + +export function isEmptyObject(o: Record): boolean { + return Object.values(o).filter((x) => x !== undefined).length !== 0 } -// check if arg is a valid date -export function isValidDate(arg) { +/** + * check if arg is a valid date + */ +export function isValidDate(arg: unknown): arg is Date { + // @ts-expect-error TS(2345): Argument of type 'Date' is not assignable to param... Remove this comment to see the full error message return arg instanceof Date && !isNaN(arg) } -// Create a Date string with format: -// 'YYYYMMDDTHHmmss' + Z -export function makeDateLong(date) { +/** + * Create a Date string with format: 'YYYYMMDDTHHmmss' + Z + */ +export function makeDateLong(date?: Date): string { date = date || new Date() // Gives format like: '2017-08-07T16:28:59.889Z' - date = date.toISOString() + const s = date.toISOString() - return date.slice(0, 4) + date.slice(5, 7) + date.slice(8, 13) + date.slice(14, 16) + date.slice(17, 19) + 'Z' + return s.slice(0, 4) + s.slice(5, 7) + s.slice(8, 13) + s.slice(14, 16) + s.slice(17, 19) + 'Z' } -// Create a Date string with format: -// 'YYYYMMDD' -export function makeDateShort(date) { +/** + * Create a Date string with format: 'YYYYMMDD' + */ +export function makeDateShort(date?: Date) { date = date || new Date() // Gives format like: '2017-08-07T16:28:59.889Z' - date = date.toISOString() + const s = date.toISOString() - return date.slice(0, 4) + date.slice(5, 7) + date.slice(8, 10) + return s.slice(0, 4) + s.slice(5, 7) + s.slice(8, 10) } 
-// pipesetup sets up pipe() from left to right os streams array -// pipesetup will also make sure that error emitted at any of the upstream Stream -// will be emitted at the last stream. This makes error handling simple -export function pipesetup(...streams) { - return streams.reduce((src, dst) => { - src.on('error', (err) => dst.emit('error', err)) - return src.pipe(dst) - }) +/** + * pipesetup sets up pipe() from left to right os streams array + * pipesetup will also make sure that error emitted at any of the upstream Stream + * will be emitted at the last stream. This makes error handling simple + */ +export function pipesetup(src: stream.Readable, dst: stream.Writable) { + src.on('error', (err: unknown) => dst.emit('error', err)) + return src.pipe(dst) } -// return a Readable stream that emits data -export function readableStream(data) { - var s = new stream.Readable() +/** + * return a Readable stream that emits data + */ +export function readableStream(data: unknown): stream.Readable { + const s = new stream.Readable() s._read = () => {} s.push(data) s.push(null) return s } -// Process metadata to insert appropriate value to `content-type` attribute -export function insertContentType(metaData, filePath) { +/** + * Process metadata to insert appropriate value to `content-type` attribute + */ +export function insertContentType(metaData: MetaData, filePath: string) { // check if content-type attribute present in metaData - for (var key in metaData) { + for (const key in metaData) { if (key.toLowerCase() === 'content-type') { return metaData } } // if `content-type` attribute is not present in metadata, // then infer it from the extension in filePath - var newMetadata = Object.assign({}, metaData) + const newMetadata = Object.assign({}, metaData) newMetadata['content-type'] = probeContentType(filePath) return newMetadata } -// Function prepends metadata with the appropriate prefix if it is not already on -export function prependXAMZMeta(metaData) { - var 
newMetadata = Object.assign({}, metaData) - for (var key in metaData) { - if (!isAmzHeader(key) && !isSupportedHeader(key) && !isStorageclassHeader(key)) { - newMetadata['X-Amz-Meta-' + key] = newMetadata[key] +/** + * Function prepends metadata with the appropriate prefix if it is not already on + */ +export function prependXAMZMeta(metaData?: MetaData) { + if (!metaData) { + return {} + } + + const newMetadata = Object.assign({}, metaData) + for (const [key, value] of _.entries(metaData)) { + if (!isAmzHeader(key) && !isSupportedHeader(key) && !isStorageClassHeader(key)) { + newMetadata['X-Amz-Meta-' + key] = value delete newMetadata[key] } } return newMetadata } -// Checks if it is a valid header according to the AmazonS3 API -export function isAmzHeader(key) { - var temp = key.toLowerCase() +/** + * Checks if it is a valid header according to the AmazonS3 API + */ +export function isAmzHeader(key: string) { + const temp = key.toLowerCase() return ( temp.startsWith('x-amz-meta-') || temp === 'x-amz-acl' || @@ -357,9 +376,12 @@ export function isAmzHeader(key) { temp === 'x-amz-server-side-encryption' ) } -// Checks if it is a supported Header -export function isSupportedHeader(key) { - var supported_headers = [ + +/** + * Checks if it is a supported Header + */ +export function isSupportedHeader(key: string) { + const supported_headers = [ 'content-type', 'cache-control', 'content-encoding', @@ -367,20 +389,25 @@ export function isSupportedHeader(key) { 'content-language', 'x-amz-website-redirect-location', ] - return supported_headers.indexOf(key.toLowerCase()) > -1 + return supported_headers.includes(key.toLowerCase()) } -// Checks if it is a storage header -export function isStorageclassHeader(key) { + +/** + * Checks if it is a storage header + */ +export function isStorageClassHeader(key: string) { return key.toLowerCase() === 'x-amz-storage-class' } -export function extractMetadata(metaData) { - var newMetadata = {} - for (var key in metaData) { - if 
(isSupportedHeader(key) || isStorageclassHeader(key) || isAmzHeader(key)) { +export function extractMetadata(metaData: IncomingHttpHeaders) { + const newMetadata = {} + for (const key in metaData) { + if (isSupportedHeader(key) || isStorageClassHeader(key) || isAmzHeader(key)) { if (key.toLowerCase().startsWith('x-amz-meta-')) { + // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... Remove this comment to see the full error message newMetadata[key.slice(11, key.length)] = metaData[key] } else { + // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... Remove this comment to see the full error message newMetadata[key] = metaData[key] } } @@ -388,68 +415,70 @@ export function extractMetadata(metaData) { return newMetadata } -export function getVersionId(headers = {}) { - const versionIdValue = headers['x-amz-version-id'] +export function getVersionId(headers: IncomingHttpHeaders = {}) { + const versionIdValue = headers['x-amz-version-id'] as string return versionIdValue || null } -export function getSourceVersionId(headers = {}) { +export function getSourceVersionId(headers: IncomingHttpHeaders = {}) { const sourceVersionId = headers['x-amz-copy-source-version-id'] return sourceVersionId || null } -export function sanitizeETag(etag = '') { - var replaceChars = { '"': '', '"': '', '"': '', '"': '', '"': '' } +export function sanitizeETag(etag = ''): string { + const replaceChars = { '"': '', '"': '', '"': '', '"': '', '"': '' } + // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... 
Remove this comment to see the full error message return etag.replace(/^("|"|")|("|"|")$/g, (m) => replaceChars[m]) } export const RETENTION_MODES = { GOVERNANCE: 'GOVERNANCE', COMPLIANCE: 'COMPLIANCE', -} +} as const export const RETENTION_VALIDITY_UNITS = { DAYS: 'Days', YEARS: 'Years', -} +} as const export const LEGAL_HOLD_STATUS = { ENABLED: 'ON', DISABLED: 'OFF', -} +} as const -const objectToBuffer = (payload) => { - const payloadBuf = Buffer.from(Buffer.from(payload)) - return payloadBuf +function objectToBuffer(payload: Binary | Uint8Array): Buffer { + // don't know how to write this... + return Buffer.from(payload) } -export const toMd5 = (payload) => { - let payLoadBuf = objectToBuffer(payload) +export function toMd5(payload: Binary | Uint8Array): string { + let payLoadBuf: Binary = objectToBuffer(payload) // use string from browser and buffer from nodejs // browser support is tested only against minio server payLoadBuf = isBrowser ? payLoadBuf.toString() : payLoadBuf - return Crypto.createHash('md5').update(payLoadBuf).digest().toString('base64') + return crypto.createHash('md5').update(payLoadBuf).digest().toString('base64') } -export const toSha256 = (payload) => { - return Crypto.createHash('sha256').update(payload).digest('hex') +export function toSha256(payload: Binary | Uint8Array): string { + return crypto.createHash('sha256').update(payload).digest('hex') } -// toArray returns a single element array with param being the element, -// if param is just a string, and returns 'param' back if it is an array -// So, it makes sure param is always an array -export const toArray = (param) => { +/** + * toArray returns a single element array with param being the element, + * if param is just a string, and returns 'param' back if it is an array + * So, it makes sure param is always an array + */ +export function toArray(param: T | T[]): Array { if (!Array.isArray(param)) { - return [param] + return [param] as T[] } return param } -export const 
sanitizeObjectKey = (objectName) => { +export function sanitizeObjectKey(objectName: string): string { // + symbol characters are not decoded as spaces in JS. so replace them first and decode to get the correct result. - let asStrName = (objectName ? objectName.toString() : '').replace(/\+/g, ' ') - const sanitizedName = decodeURIComponent(asStrName) - return sanitizedName + const asStrName = (objectName ? objectName.toString() : '').replace(/\+/g, ' ') + return decodeURIComponent(asStrName) } export const PART_CONSTRAINTS = { @@ -483,23 +512,27 @@ const ENCRYPTION_HEADERS = { sseGenericHeader: GENERIC_SSE_HEADER, // sseKmsKeyID is the AWS SSE-KMS key id. sseKmsKeyID: GENERIC_SSE_HEADER + '-Aws-Kms-Key-Id', -} +} as const /** * Return Encryption headers * @param encConfig * @returns an object with key value pairs that can be used in headers. */ -function getEncryptionHeaders(encConfig) { +function getEncryptionHeaders(encConfig: Encryption): Record { const encType = encConfig.type const encHeaders = {} - if (!_.isEmpty(encType)) { + if (!isEmpty(encType)) { if (encType === ENCRYPTION_TYPES.SSEC) { return { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore [encHeaders[ENCRYPTION_HEADERS.sseGenericHeader]]: 'AES256', } } else if (encType === ENCRYPTION_TYPES.KMS) { return { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore [ENCRYPTION_HEADERS.sseGenericHeader]: encConfig.SSEAlgorithm, [ENCRYPTION_HEADERS.sseKmsKeyID]: encConfig.KMSMasterKeyID, } @@ -510,16 +543,28 @@ function getEncryptionHeaders(encConfig) { } export class CopySourceOptions { + public readonly Bucket: string + public readonly Object: string + public readonly VersionID: string + public MatchETag: string + private readonly NoMatchETag: string + private readonly MatchModifiedSince: string | null + private readonly MatchUnmodifiedSince: string | null + public readonly MatchRange: boolean + public readonly Start: number + public readonly End: 
number + private readonly Encryption?: Encryption + /** * - * @param Bucket __string__ Bucket Name - * @param Object __string__ Object Name - * @param VersionID __string__ Valid versionId - * @param MatchETag __string__ Etag to match - * @param NoMatchETag __string__ Etag to exclude - * @param MatchModifiedSince __string__ Modified Date of the object/part. UTC Date in string format - * @param MatchUnmodifiedSince __string__ Modified Date of the object/part to exclude UTC Date in string format - * @param MatchRange __boolean__ true or false Object range to match + * @param Bucket - Bucket Name + * @param Object - Object Name + * @param VersionID - Valid versionId + * @param MatchETag - Etag to match + * @param NoMatchETag - Etag to exclude + * @param MatchModifiedSince - Modified Date of the object/part. UTC Date in string format + * @param MatchUnmodifiedSince - Modified Date of the object/part to exclude UTC Date in string format + * @param MatchRange - true or false Object range to match * @param Start * @param End * @param Encryption @@ -535,7 +580,19 @@ export class CopySourceOptions { MatchRange = false, Start = 0, End = 0, - Encryption = {}, + Encryption = undefined, + }: { + Bucket?: string + Object?: string + VersionID?: string + MatchETag?: string + NoMatchETag?: string + MatchModifiedSince?: string | null + MatchUnmodifiedSince?: string | null + MatchRange?: boolean + Start?: number + End?: number + Encryption?: Encryption } = {}) { this.Bucket = Bucket this.Object = Object @@ -569,24 +626,24 @@ export class CopySourceOptions { } getHeaders() { - let headerOptions = {} + const headerOptions: Header = {} headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) - if (!_.isEmpty(this.VersionID)) { + if (!isEmpty(this.VersionID)) { headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) + '?versionId=' + this.VersionID } - if (!_.isEmpty(this.MatchETag)) { + if (!isEmpty(this.MatchETag)) { 
headerOptions['x-amz-copy-source-if-match'] = this.MatchETag } - if (!_.isEmpty(this.NoMatchETag)) { + if (!isEmpty(this.NoMatchETag)) { headerOptions['x-amz-copy-source-if-none-match'] = this.NoMatchETag } - if (!_.isEmpty(this.MatchModifiedSince)) { + if (!isEmpty(this.MatchModifiedSince)) { headerOptions['x-amz-copy-source-if-modified-since'] = this.MatchModifiedSince } - if (!_.isEmpty(this.MatchUnmodifiedSince)) { + if (!isEmpty(this.MatchUnmodifiedSince)) { headerOptions['x-amz-copy-source-if-unmodified-since'] = this.MatchUnmodifiedSince } @@ -594,30 +651,54 @@ export class CopySourceOptions { } } +export type Encryption = { + type: string + SSEAlgorithm?: string + KMSMasterKeyID?: string +} + export class CopyDestinationOptions { - /* - * @param Bucket __string__ - * @param Object __string__ Object Name for the destination (composed/copied) object defaults - * @param Encryption __object__ Encryption configuration defaults to {} - * @param UserMetadata __object__ - * @param UserTags __object__ | __string__ - * @param LegalHold __string__ ON | OFF - * @param RetainUntilDate __string__ UTC Date String + public readonly Bucket: string + public readonly Object: string + private readonly Encryption?: Encryption + private readonly UserMetadata?: MetaData + private readonly UserTags?: Record | string + private readonly LegalHold?: 'on' | 'off' + private readonly RetainUntilDate?: string + private readonly Mode?: Mode + + /** + * @param Bucket - Bucket name + * @param Object - Object Name for the destination (composed/copied) object defaults + * @param Encryption - Encryption configuration defaults to {} + * @param UserMetadata - + * @param UserTags + * @param LegalHold - + * @param RetainUntilDate - UTC Date String * @param Mode */ constructor({ - Bucket = '', - Object = '', - Encryption = null, - UserMetadata = null, - UserTags = null, - LegalHold = null, - RetainUntilDate = null, - Mode = null, // + Bucket, + Object, + Encryption, + UserMetadata, + UserTags, + 
LegalHold, + RetainUntilDate, + Mode, + }: { + Bucket: string + Object: string + Encryption?: Encryption + UserMetadata?: MetaData + UserTags?: Record | string + LegalHold?: 'on' | 'off' + RetainUntilDate?: string + Mode?: Mode }) { this.Bucket = Bucket this.Object = Object - this.Encryption = Encryption + this.Encryption = Encryption ?? undefined // null input will become undefined, easy for runtime assert this.UserMetadata = UserMetadata this.UserTags = UserTags this.LegalHold = LegalHold @@ -625,47 +706,43 @@ export class CopyDestinationOptions { this.RetainUntilDate = RetainUntilDate } - getHeaders() { + getHeaders(): Record { const replaceDirective = 'REPLACE' - const headerOptions = {} + const headerOptions: Record = {} const userTags = this.UserTags - if (!_.isEmpty(userTags)) { + if (!isEmpty(userTags)) { headerOptions['X-Amz-Tagging-Directive'] = replaceDirective - headerOptions['X-Amz-Tagging'] = isObject(userTags) - ? querystring.stringify(userTags) - : isString(userTags) - ? userTags - : '' + headerOptions['X-Amz-Tagging'] = isObject(userTags) ? qs(userTags) : isString(userTags) ? userTags : '' } - if (!_.isEmpty(this.Mode)) { + if (this.Mode) { headerOptions['X-Amz-Object-Lock-Mode'] = this.Mode // GOVERNANCE or COMPLIANCE } - if (!_.isEmpty(this.RetainUntilDate)) { + if (this.RetainUntilDate) { headerOptions['X-Amz-Object-Lock-Retain-Until-Date'] = this.RetainUntilDate // needs to be UTC. 
} - if (!_.isEmpty(this.LegalHold)) { + if (this.LegalHold) { headerOptions['X-Amz-Object-Lock-Legal-Hold'] = this.LegalHold // ON or OFF } - if (!_.isEmpty(this.UserMetadata)) { - const headerKeys = Object.keys(this.UserMetadata) - headerKeys.forEach((key) => { - headerOptions[`X-Amz-Meta-${key}`] = this.UserMetadata[key] - }) + if (this.UserMetadata) { + for (const [key, value] of Object.entries(this.UserMetadata)) { + headerOptions[`X-Amz-Meta-${key}`] = value.toString() + } } - if (!_.isEmpty(this.Encryption)) { + if (this.Encryption) { const encryptionHeaders = getEncryptionHeaders(this.Encryption) - Object.keys(encryptionHeaders).forEach((key) => { - headerOptions[key] = encryptionHeaders[key] - }) + for (const [key, value] of Object.entries(encryptionHeaders)) { + headerOptions[key] = value + } } return headerOptions } + validate() { if (!isValidBucketName(this.Bucket)) { throw new errors.InvalidBucketNameError('Invalid Destination bucket name: ' + this.Bucket) @@ -673,25 +750,25 @@ export class CopyDestinationOptions { if (!isValidObjectName(this.Object)) { throw new errors.InvalidObjectNameError(`Invalid Destination object name: ${this.Object}`) } - if (!_.isEmpty(this.UserMetadata) && !isObject(this.UserMetadata)) { + if (!isEmpty(this.UserMetadata) && !isObject(this.UserMetadata)) { throw new errors.InvalidObjectNameError(`Destination UserMetadata should be an object with key value pairs`) } - if (!_.isEmpty(this.Mode) && ![RETENTION_MODES.GOVERNANCE, RETENTION_MODES.COMPLIANCE].includes(this.Mode)) { + if (!isEmpty(this.Mode) && ![RETENTION_MODES.GOVERNANCE, RETENTION_MODES.COMPLIANCE].includes(this.Mode)) { throw new errors.InvalidObjectNameError( `Invalid Mode specified for destination object it should be one of [GOVERNANCE,COMPLIANCE]`, ) } - if (!_.isEmpty(this.Encryption) && _.isEmpty(this.Encryption)) { + if (this.Encryption !== undefined && isEmptyObject(this.Encryption)) { throw new errors.InvalidObjectNameError(`Invalid Encryption configuration 
for destination object `) } return true } } -export const partsRequired = (size) => { - let maxPartSize = PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE / (PART_CONSTRAINTS.MAX_PARTS_COUNT - 1) +export function partsRequired(size: number): number { + const maxPartSize = PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE / (PART_CONSTRAINTS.MAX_PARTS_COUNT - 1) let requiredPartSize = size / maxPartSize if (size % maxPartSize > 0) { requiredPartSize++ @@ -700,23 +777,22 @@ export const partsRequired = (size) => { return requiredPartSize } -// calculateEvenSplits - computes splits for a source and returns -// start and end index slices. Splits happen evenly to be sure that no -// part is less than 5MiB, as that could fail the multipart request if -// it is not the last part. - -let startIndexParts = [] -let endIndexParts = [] -export function calculateEvenSplits(size, objInfo) { +/** + * calculateEvenSplits - computes splits for a source and returns + * start and end index slices. Splits happen evenly to be sure that no + * part is less than 5MiB, as that could fail the multipart request if + * it is not the last part. 
+ */ +export function calculateEvenSplits(size: number, objInfo: { Start?: unknown; Bucket: string; Object: string }) { if (size === 0) { return null } const reqParts = partsRequired(size) - startIndexParts = new Array(reqParts) - endIndexParts = new Array(reqParts) + const startIndexParts = new Array(reqParts) + const endIndexParts = new Array(reqParts) - let start = objInfo.Start - if (_.isEmpty(objInfo.Start) || start === -1) { + let start = objInfo.Start as number + if (isEmpty(objInfo.Start) || start === -1) { start = 0 } const divisorValue = Math.trunc(size / reqParts) @@ -732,7 +808,7 @@ export function calculateEvenSplits(size, objInfo) { } const currentStart = nextStart - let currentEnd = currentStart + curPartSize - 1 + const currentEnd = currentStart + curPartSize - 1 nextStart = currentEnd + 1 startIndexParts[i] = currentStart @@ -742,31 +818,32 @@ export function calculateEvenSplits(size, objInfo) { return { startIndex: startIndexParts, endIndex: endIndexParts, objInfo: objInfo } } -export function removeDirAndFiles(dirPath, removeSelf) { - if (removeSelf === undefined) { - removeSelf = true - } +export function removeDirAndFiles(dirPath: string, removeSelf = true) { + let files try { - var files = fs.readdirSync(dirPath) + files = fs.readdirSync(dirPath) } catch (e) { return } - if (files.length > 0) { - for (var i = 0; i < files.length; i++) { - var filePath = path.join(dirPath, files[i]) - if (fs.statSync(filePath).isFile()) { - fs.unlinkSync(filePath) - } else { - removeDirAndFiles(filePath) - } + + for (const item of files) { + const filePath = path.join(dirPath, item) + if (fs.statSync(filePath).isFile()) { + fs.unlinkSync(filePath) + } else { + removeDirAndFiles(filePath, true) } } + if (removeSelf) { fs.rmdirSync(dirPath) } } -export const parseXml = (xml) => { +const fxp = new XMLParser() + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export function parseXml(xml: string): any { let result = null result = fxp.parse(xml) if 
(result.Error) { @@ -776,12 +853,25 @@ export const parseXml = (xml) => { return result } +/** + * maybe this should be a generic type for Records, leave it for later refactor + */ export class SelectResults { + private records?: unknown + private response?: unknown + private stats?: string + private progress?: unknown + constructor({ records, // parsed data as stream response, // original response stream stats, // stats as xml progress, // stats as xml + }: { + records?: unknown + response?: unknown + stats?: string + progress?: unknown }) { this.records = records this.response = response @@ -789,32 +879,35 @@ export class SelectResults { this.progress = progress } - setStats(stats) { + setStats(stats: string) { this.stats = stats } + getStats() { return this.stats } - setProgress(progress) { + setProgress(progress: unknown) { this.progress = progress } + getProgress() { return this.progress } - setResponse(response) { + setResponse(response: unknown) { this.response = response } + getResponse() { return this.response } - setRecords(records) { + setRecords(records: unknown) { this.records = records } - getRecords() { + getRecords(): unknown { return this.records } } diff --git a/src/minio.js b/src/minio.js index 2567e254..c0df51a1 100644 --- a/src/minio.js +++ b/src/minio.js @@ -43,7 +43,6 @@ import { getVersionId, insertContentType, isAmazonEndpoint, - isArray, isBoolean, isFunction, isNumber, @@ -63,7 +62,6 @@ import { partsRequired, pipesetup, prependXAMZMeta, - promisify, readableStream, RETENTION_MODES, RETENTION_VALIDITY_UNITS, @@ -72,9 +70,10 @@ import { toSha256, uriEscape, uriResourceEscape, -} from './helpers.js' +} from './helpers.ts' import { NotificationConfig, NotificationPoller } from './notification.js' import { ObjectUploader } from './object-uploader.js' +import { promisify } from './promisify.js' import { getS3Endpoint } from './s3-endpoints.js' import { postPresignSignatureV4, presignSignatureV4, signV4 } from './signing.js' import * as 
transformers from './transformers.js' @@ -83,7 +82,7 @@ import { parseSelectObjectContentResponse } from './xml-parsers.js' // will be replaced by bundler const Package = { version: process.env.MINIO_JS_PACKAGE_VERSION || 'development' } -export * from './helpers.js' +export * from './helpers.ts' export * from './notification.js' export class Client { @@ -1913,7 +1912,7 @@ export class Client { if (!isValidBucketName(bucketName)) { throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) } - if (!isArray(objectsList)) { + if (!Array.isArray(objectsList)) { throw new errors.InvalidArgumentError('objectsList should be a list') } if (!isFunction(cb)) { @@ -2637,7 +2636,7 @@ export class Client { if (!isString(suffix)) { throw new TypeError('suffix must be of type string') } - if (!isArray(events)) { + if (!Array.isArray(events)) { throw new TypeError('events must be of type Array') } let listener = new NotificationPoller(this, bucketName, prefix, suffix, events) @@ -3543,7 +3542,7 @@ export class Client { const me = this // many async flows. so store the ref. const sourceFilesLength = sourceObjList.length - if (!isArray(sourceObjList)) { + if (!Array.isArray(sourceObjList)) { throw new errors.InvalidArgumentError('sourceConfig should an array of CopySourceOptions ') } if (!(destObjConfig instanceof CopyDestinationOptions)) { diff --git a/src/notification.js b/src/notification.js index 5fe14541..9ceb2b03 100644 --- a/src/notification.js +++ b/src/notification.js @@ -16,7 +16,7 @@ import { EventEmitter } from 'node:events' -import { DEFAULT_REGION, pipesetup, uriEscape } from './helpers.js' +import { DEFAULT_REGION, pipesetup, uriEscape } from './helpers.ts' import * as transformers from './transformers.js' // Notification config - array of target configs. 
diff --git a/src/object-uploader.js b/src/object-uploader.js index 2fdf6606..7c3dcaff 100644 --- a/src/object-uploader.js +++ b/src/object-uploader.js @@ -19,7 +19,7 @@ import { Transform } from 'node:stream' import * as querystring from 'query-string' -import { getVersionId, sanitizeETag } from './helpers.js' +import { getVersionId, sanitizeETag } from './helpers.ts' // We extend Transform because Writable does not implement ._flush(). export class ObjectUploader extends Transform { diff --git a/src/promisify.js b/src/promisify.js new file mode 100644 index 00000000..1f68464a --- /dev/null +++ b/src/promisify.js @@ -0,0 +1,31 @@ +// Returns a wrapper function that will promisify a given callback function. +// It will preserve 'this'. +export function promisify(fn) { + return function () { + // If the last argument is a function, assume its the callback. + let callback = arguments[arguments.length - 1] + + // If the callback is given, don't promisify, just pass straight in. + if (typeof callback === 'function') { + return fn.apply(this, arguments) + } + + // Otherwise, create a new set of arguments, and wrap + // it in a promise. + let args = [...arguments] + + return new Promise((resolve, reject) => { + // Add the callback function. + args.push((err, value) => { + if (err) { + return reject(err) + } + + resolve(value) + }) + + // Call the function with our special adaptor callback added. + fn.apply(this, args) + }) + } +} diff --git a/src/qs.ts b/src/qs.ts new file mode 100644 index 00000000..56c17504 --- /dev/null +++ b/src/qs.ts @@ -0,0 +1,7 @@ +import queryString from 'query-string' + +// rfc 3986 encoding. +// `URLSearchParams` and `node:querystring` won't work +export function qs(q: Record): string { + return queryString.stringify(q) +} diff --git a/src/s3-endpoints.js b/src/s3-endpoints.js index aa6a7921..f37d2ba7 100644 --- a/src/s3-endpoints.js +++ b/src/s3-endpoints.js @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import { isString } from './helpers.js' +import { isString } from './helpers.ts' // List of currently supported endpoints. const awsS3Endpoint = { diff --git a/src/signing.js b/src/signing.js index 247206f6..aaf0251a 100644 --- a/src/signing.js +++ b/src/signing.js @@ -19,7 +19,7 @@ import * as Crypto from 'node:crypto' import _ from 'lodash' import * as errors from './errors.ts' -import { getScope, isArray, isNumber, isObject, isString, makeDateLong, makeDateShort, uriEscape } from './helpers.js' +import { getScope, isNumber, isObject, isString, makeDateLong, makeDateShort, uriEscape } from './helpers.ts' const signV4Algorithm = 'AWS4-HMAC-SHA256' @@ -43,7 +43,7 @@ function getCanonicalRequest(method, path, headers, signedHeaders, hashedPayload if (!isObject(headers)) { throw new TypeError('headers should be of type "object"') } - if (!isArray(signedHeaders)) { + if (!Array.isArray(signedHeaders)) { throw new TypeError('signedHeaders should be of type "array"') } if (!isString(hashedPayload)) { diff --git a/src/transformers.js b/src/transformers.js index 4cde9a2a..34ddee3d 100644 --- a/src/transformers.js +++ b/src/transformers.js @@ -21,7 +21,7 @@ import _ from 'lodash' import Through2 from 'through2' import * as errors from './errors.ts' -import { isFunction } from './helpers.js' +import { isFunction } from './helpers.ts' import * as xmlParsers from './xml-parsers.js' // getConcater returns a stream that concatenates the input and emits diff --git a/src/type.ts b/src/type.ts new file mode 100644 index 00000000..f44ba1d7 --- /dev/null +++ b/src/type.ts @@ -0,0 +1,3 @@ +export type Binary = string | Buffer + +export type Mode = 'COMPLIANCE' | 'GOVERNANCE' diff --git a/src/xml-parsers.js b/src/xml-parsers.js index 447ec898..022eed15 100644 --- a/src/xml-parsers.js +++ b/src/xml-parsers.js @@ -28,7 +28,7 @@ import { sanitizeObjectKey, SelectResults, toArray, -} from './helpers.js' +} from './helpers.ts' // Parse XML and return information as Javascript types 
const fxp = new XMLParser() diff --git a/tests/functional/functional-tests.js b/tests/functional/functional-tests.js index d0ab714b..c6dfa136 100644 --- a/tests/functional/functional-tests.js +++ b/tests/functional/functional-tests.js @@ -36,9 +36,8 @@ import { CopySourceOptions, DEFAULT_REGION, getVersionId, - isArray, removeDirAndFiles, -} from '../../src/helpers.js' +} from '../../src/helpers.ts' import * as minio from '../../src/minio.js' const assert = chai.assert @@ -2447,7 +2446,7 @@ describe('functional tests', function () { if (err) { return done(err) } - if (isArray(tagList)) { + if (Array.isArray(tagList)) { done() } }) @@ -2507,7 +2506,7 @@ describe('functional tests', function () { if (err) { return done(err) } - if (isArray(tagList)) { + if (Array.isArray(tagList)) { done() } }) @@ -2604,7 +2603,7 @@ describe('functional tests', function () { if (err) { return done(err) } - if (isArray(tagList)) { + if (Array.isArray(tagList)) { done() } }) diff --git a/tests/unit/test.js b/tests/unit/test.js index ceaf4256..d3a94ee5 100644 --- a/tests/unit/test.js +++ b/tests/unit/test.js @@ -23,13 +23,12 @@ import { calculateEvenSplits, CopyDestinationOptions, CopySourceOptions, - isArray, isValidEndpoint, isValidIP, makeDateLong, makeDateShort, partsRequired, -} from '../../src/helpers.js' +} from '../../src/helpers.ts' import * as Minio from '../../src/minio.js' const Package = { version: 'development' } @@ -174,7 +173,7 @@ describe('Helpers', () => { const fnResult = calculateEvenSplits(testCase.size, testCase) const { startIndex, endIndex } = fnResult || {} - if (isArray(startIndex) && isArray(endIndex)) { + if (Array.isArray(startIndex) && Array.isArray(endIndex)) { const isExpectedResult = startIndex.length === testCase.expectedStart.length && endIndex.length === testCase.expectedEnd.length assert.equal(isExpectedResult, true) From 37e652843d577e13734b242afeaf9050b6bab31d Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 14:48:23 +0800 Subject: 
[PATCH 23/78] no need @internal on un-exported variable --- src/errors.ts | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/errors.ts b/src/errors.ts index adf0bef2..3bdab3b7 100644 --- a/src/errors.ts +++ b/src/errors.ts @@ -16,9 +16,6 @@ /// -/** - * @internal - */ class ExtendableError extends Error { constructor(message?: string, opt?: ErrorOptions) { // error Option {cause?: unknown} is a 'nice to have', From af73d778d0f529ddabc7d236987eb196cebbf863 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 14:49:32 +0800 Subject: [PATCH 24/78] remove qs --- src/helpers.ts | 12 ++++++++---- src/qs.ts | 7 ------- 2 files changed, 8 insertions(+), 11 deletions(-) delete mode 100644 src/qs.ts diff --git a/src/helpers.ts b/src/helpers.ts index cd19539d..3f130f68 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -15,9 +15,9 @@ */ import * as crypto from 'node:crypto' -import fs from 'node:fs' +import * as fs from 'node:fs' import type { IncomingHttpHeaders } from 'node:http' -import path from 'node:path' +import * as path from 'node:path' import * as stream from 'node:stream' import { isBrowser } from 'browser-or-node' @@ -25,9 +25,9 @@ import { XMLParser } from 'fast-xml-parser' import ipaddr from 'ipaddr.js' import _ from 'lodash' import mime from 'mime-types' +import querystring from 'query-string' import * as errors from './errors.ts' -import { qs } from './qs.ts' import type { Binary, Mode } from './type.ts' export type MetaData = Record @@ -713,7 +713,11 @@ export class CopyDestinationOptions { const userTags = this.UserTags if (!isEmpty(userTags)) { headerOptions['X-Amz-Tagging-Directive'] = replaceDirective - headerOptions['X-Amz-Tagging'] = isObject(userTags) ? qs(userTags) : isString(userTags) ? userTags : '' + headerOptions['X-Amz-Tagging'] = isObject(userTags) + ? querystring.stringify(userTags) + : isString(userTags) + ? 
userTags + : '' } if (this.Mode) { diff --git a/src/qs.ts b/src/qs.ts deleted file mode 100644 index 56c17504..00000000 --- a/src/qs.ts +++ /dev/null @@ -1,7 +0,0 @@ -import queryString from 'query-string' - -// rfc 3986 encoding. -// `URLSearchParams` and `node:querystring` won't work -export function qs(q: Record): string { - return queryString.stringify(q) -} From 367ae09db898aff54f75ea9b1b86aa60fbfa1361 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 15:38:21 +0800 Subject: [PATCH 25/78] export all --- build.mjs | 16 ++++--- package.json | 4 +- src/helpers.ts | 50 ++++++++++----------- {types => src}/minio.d.ts | 95 ++++++++++++--------------------------- src/type.ts | 6 +++ 5 files changed, 70 insertions(+), 101 deletions(-) rename {types => src}/minio.d.ts (90%) diff --git a/build.mjs b/build.mjs index bc5c662a..50a6b3d2 100644 --- a/build.mjs +++ b/build.mjs @@ -64,14 +64,16 @@ async function buildFiles({ files, module, outDir }) { continue } - if (file.path.endsWith('.d.ts')) { - continue - } - const outFilePath = path.join(outDir, path.relative('src/', file.path)) const outDirPath = path.dirname(outFilePath) await fsp.mkdir(outDirPath, { recursive: true }) + const distCodePath = outFilePath.replace(/\.[tj]s$/g, extMap[module]) + + if (file.path.endsWith('.d.ts')) { + fs.copyFileSync(file.path, outFilePath) + continue + } try { const result = await babel.transformAsync(fs.readFileSync(file.path).toString(), { @@ -79,8 +81,6 @@ async function buildFiles({ files, module, outDir }) { ...opt, }) - const distCodePath = outFilePath.replace(/\.[tj]s$/g, extMap[module]) - fs.writeFileSync(distCodePath, result.code) } catch (e) { console.error(`failed to transpile ${file.path}`) @@ -114,6 +114,10 @@ async function main() { continue } + // if (file.path.endsWith('minio.d.ts')) { + // continue + // } + const fileContent = fs.readFileSync(file.path).toString() const mts = babel.transformSync(fileContent, { diff --git a/package.json b/package.json index 
3025bbf7..f4df875a 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "description": "S3 Compatible Cloud Storage client", "main": "./dist/main/minio.js", "module": "./dist/esm/minio.mjs", - "types": "./types/minio.d.ts", + "types": "./dist/main/minio.d.ts", "scripts": { "prepare": "husky install", "tsc": "tsc", @@ -22,7 +22,7 @@ }, "exports": { ".": { - "types": "./types/minio.d.ts", + "types": "./dist/main/minio.d.ts", "require": "./dist/main/minio.js", "default": "./dist/esm/minio.mjs" }, diff --git a/src/helpers.ts b/src/helpers.ts index 3f130f68..76b9e55f 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -16,7 +16,6 @@ import * as crypto from 'node:crypto' import * as fs from 'node:fs' -import type { IncomingHttpHeaders } from 'node:http' import * as path from 'node:path' import * as stream from 'node:stream' @@ -28,10 +27,7 @@ import mime from 'mime-types' import querystring from 'query-string' import * as errors from './errors.ts' -import type { Binary, Mode } from './type.ts' - -export type MetaData = Record -export type Header = Record +import type { Binary, Header, MetaData, Mode, ResponseHeader } from './type.ts' /** * All characters in string which are NOT unreserved should be percent encoded. @@ -280,7 +276,7 @@ export function isEmptyObject(o: Record): boolean { * check if arg is a valid date */ export function isValidDate(arg: unknown): arg is Date { - // @ts-expect-error TS(2345): Argument of type 'Date' is not assignable to param... 
Remove this comment to see the full error message + // @ts-expect-error checknew Date(Math.NaN) return arg instanceof Date && !isNaN(arg) } @@ -399,36 +395,39 @@ export function isStorageClassHeader(key: string) { return key.toLowerCase() === 'x-amz-storage-class' } -export function extractMetadata(metaData: IncomingHttpHeaders) { - const newMetadata = {} - for (const key in metaData) { +export function extractMetadata(headers: ResponseHeader) { + const newMetadata: Record = {} + for (const [key, value] of Object.entries(headers)) { if (isSupportedHeader(key) || isStorageClassHeader(key) || isAmzHeader(key)) { if (key.toLowerCase().startsWith('x-amz-meta-')) { - // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... Remove this comment to see the full error message - newMetadata[key.slice(11, key.length)] = metaData[key] + newMetadata[key.slice(11, key.length)] = value } else { - // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... Remove this comment to see the full error message - newMetadata[key] = metaData[key] + newMetadata[key] = value } } } return newMetadata } -export function getVersionId(headers: IncomingHttpHeaders = {}) { +export function getVersionId(headers: ResponseHeader = {}) { const versionIdValue = headers['x-amz-version-id'] as string return versionIdValue || null } -export function getSourceVersionId(headers: IncomingHttpHeaders = {}) { +export function getSourceVersionId(headers: ResponseHeader = {}) { const sourceVersionId = headers['x-amz-copy-source-version-id'] return sourceVersionId || null } export function sanitizeETag(etag = ''): string { - const replaceChars = { '"': '', '"': '', '"': '', '"': '', '"': '' } - // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... 
Remove this comment to see the full error message - return etag.replace(/^("|"|")|("|"|")$/g, (m) => replaceChars[m]) + const replaceChars: Record = { + '"': '', + '"': '', + '"': '', + '"': '', + '"': '', + } + return etag.replace(/^("|"|")|("|"|")$/g, (m) => replaceChars[m] as string) } export const RETENTION_MODES = { @@ -446,12 +445,12 @@ export const LEGAL_HOLD_STATUS = { DISABLED: 'OFF', } as const -function objectToBuffer(payload: Binary | Uint8Array): Buffer { +function objectToBuffer(payload: Binary): Buffer { // don't know how to write this... return Buffer.from(payload) } -export function toMd5(payload: Binary | Uint8Array): string { +export function toMd5(payload: Binary): string { let payLoadBuf: Binary = objectToBuffer(payload) // use string from browser and buffer from nodejs // browser support is tested only against minio server @@ -459,7 +458,7 @@ export function toMd5(payload: Binary | Uint8Array): string { return crypto.createHash('md5').update(payLoadBuf).digest().toString('base64') } -export function toSha256(payload: Binary | Uint8Array): string { +export function toSha256(payload: Binary): string { return crypto.createHash('sha256').update(payload).digest('hex') } @@ -662,7 +661,7 @@ export class CopyDestinationOptions { public readonly Object: string private readonly Encryption?: Encryption private readonly UserMetadata?: MetaData - private readonly UserTags?: Record | string + private readonly UserTags?: Record | string private readonly LegalHold?: 'on' | 'off' private readonly RetainUntilDate?: string private readonly Mode?: Mode @@ -672,7 +671,7 @@ export class CopyDestinationOptions { * @param Object - Object Name for the destination (composed/copied) object defaults * @param Encryption - Encryption configuration defaults to {} * @param UserMetadata - - * @param UserTags + * @param UserTags - query-string escaped string or Record * @param LegalHold - * @param RetainUntilDate - UTC Date String * @param Mode @@ -691,7 +690,7 @@ export 
class CopyDestinationOptions { Object: string Encryption?: Encryption UserMetadata?: MetaData - UserTags?: Record | string + UserTags?: Record | string LegalHold?: 'on' | 'off' RetainUntilDate?: string Mode?: Mode @@ -848,8 +847,7 @@ const fxp = new XMLParser() // eslint-disable-next-line @typescript-eslint/no-explicit-any export function parseXml(xml: string): any { - let result = null - result = fxp.parse(xml) + let result = fxp.parse(xml) if (result.Error) { throw result.Error } diff --git a/types/minio.d.ts b/src/minio.d.ts similarity index 90% rename from types/minio.d.ts rename to src/minio.d.ts index dbd85e74..7829a070 100644 --- a/types/minio.d.ts +++ b/src/minio.d.ts @@ -1,9 +1,15 @@ // imported from https://github.com/DefinitelyTyped/DefinitelyTyped/blob/93cfb0ec069731dcdfc31464788613f7cddb8192/types/minio/index.d.ts +/* eslint-disable */ + import { EventEmitter } from 'node:events' import type { RequestOptions } from 'node:https' import type { Readable as ReadableStream } from 'node:stream' +import type { CopyDestinationOptions, CopySourceOptions } from './helpers.ts' + +export * from './helpers.ts' + // Exports only from typings export type Region = | 'us-east-1' @@ -649,14 +655,14 @@ export class Client { } } -export namespace Policy { - const NONE: 'none' - const READONLY: 'readonly' - const WRITEONLY: 'writeonly' - const READWRITE: 'readwrite' +export enum Policy { + NONE = 'none', + READONLY = 'readonly', + WRITEONLY = 'writeonly', + READWRITE = 'readwrite', } -export class CopyConditions { +export declare class CopyConditions { setModified(date: Date): void setUnmodified(date: Date): void @@ -666,7 +672,7 @@ export class CopyConditions { setMatchETagExcept(etag: string): void } -export class PostPolicy { +export declare class PostPolicy { setExpires(date: Date): void setKey(objectName: string): void @@ -686,7 +692,7 @@ export class PostPolicy { setUserMetaData(metadata: Record): void } -export class NotificationPoller extends EventEmitter { +export 
declare class NotificationPoller extends EventEmitter { stop(): void start(): void @@ -695,68 +701,23 @@ export class NotificationPoller extends EventEmitter { checkForChanges(): void } -export class NotificationConfig { +export declare class NotificationConfig { add(target: TopicConfig | QueueConfig | CloudFunctionConfig): void } -export class TopicConfig extends TargetConfig { +export declare class TopicConfig extends TargetConfig { constructor(arn: string) } -export class QueueConfig extends TargetConfig { +export declare class QueueConfig extends TargetConfig { constructor(arn: string) } -export class CloudFunctionConfig extends TargetConfig { +export declare class CloudFunctionConfig extends TargetConfig { constructor(arn: string) } -export class CopySourceOptions { - constructor(options: { - Bucket: string - Object: string - VersionID?: string - MatchETag?: string - NoMatchETag?: string - MatchModifiedSince?: string - MatchUnmodifiedSince?: string - MatchRange?: boolean - Start?: number - End?: number - Encryption?: { - type: string - SSEAlgorithm?: string - KMSMasterKeyID?: string - } - }) - - getHeaders(): Record - - validate(): boolean -} - -export class CopyDestinationOptions { - constructor(options: { - Bucket: string - Object: string - Encryption?: { - type: string - SSEAlgorithm?: string - KMSMasterKeyID?: string - } - UserMetadata?: Record - UserTags?: Record | string - LegalHold?: LegalHoldStatus - RetainUntilDate?: string - Mode?: Mode - }) - - getHeaders(): Record - - validate(): boolean -} - -export function buildARN( +export declare function buildARN( partition: string, service: string, region: string, @@ -764,12 +725,12 @@ export function buildARN( resource: string, ): string -export const ObjectCreatedAll: NotificationEvent // s3:ObjectCreated:*' -export const ObjectCreatedPut: NotificationEvent // s3:ObjectCreated:Put -export const ObjectCreatedPost: NotificationEvent // s3:ObjectCreated:Post -export const ObjectCreatedCopy: NotificationEvent 
// s3:ObjectCreated:Copy -export const ObjectCreatedCompleteMultipartUpload: NotificationEvent // s3:ObjectCreated:CompleteMultipartUpload -export const ObjectRemovedAll: NotificationEvent // s3:ObjectRemoved:* -export const ObjectRemovedDelete: NotificationEvent // s3:ObjectRemoved:Delete -export const ObjectRemovedDeleteMarkerCreated: NotificationEvent // s3:ObjectRemoved:DeleteMarkerCreated -export const ObjectReducedRedundancyLostObject: NotificationEvent // s3:ReducedRedundancyLostObject +export declare const ObjectCreatedAll: NotificationEvent // s3:ObjectCreated:*' +export declare const ObjectCreatedPut: NotificationEvent // s3:ObjectCreated:Put +export declare const ObjectCreatedPost: NotificationEvent // s3:ObjectCreated:Post +export declare const ObjectCreatedCopy: NotificationEvent // s3:ObjectCreated:Copy +export declare const ObjectCreatedCompleteMultipartUpload: NotificationEvent // s3:ObjectCreated:CompleteMultipartUpload +export declare const ObjectRemovedAll: NotificationEvent // s3:ObjectRemoved:* +export declare const ObjectRemovedDelete: NotificationEvent // s3:ObjectRemoved:Delete +export declare const ObjectRemovedDeleteMarkerCreated: NotificationEvent // s3:ObjectRemoved:DeleteMarkerCreated +export declare const ObjectReducedRedundancyLostObject: NotificationEvent // s3:ReducedRedundancyLostObject diff --git a/src/type.ts b/src/type.ts index f44ba1d7..39917a44 100644 --- a/src/type.ts +++ b/src/type.ts @@ -1,3 +1,9 @@ export type Binary = string | Buffer export type Mode = 'COMPLIANCE' | 'GOVERNANCE' + +// nodejs IncomingHttpHeaders is Record, but it's actually this: +export type ResponseHeader = Record + +export type MetaData = Record +export type Header = Record From 1a232076424d2a98eb01af629ae63dad364cab1d Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 15:41:11 +0800 Subject: [PATCH 26/78] timeout --- .github/workflows/nodejs-windows.yml | 1 + .github/workflows/nodejs.yml | 1 + 2 files changed, 2 insertions(+) diff --git 
a/.github/workflows/nodejs-windows.yml b/.github/workflows/nodejs-windows.yml index 51abb81c..e7170b8c 100644 --- a/.github/workflows/nodejs-windows.yml +++ b/.github/workflows/nodejs-windows.yml @@ -28,6 +28,7 @@ jobs: - run: npm i - name: Start MinIO Server -> Run Unit and Functional Tests + timeout-minutes: 10 env: CI: true MINIO_CI_CD: true diff --git a/.github/workflows/nodejs.yml b/.github/workflows/nodejs.yml index 7c1c8db4..709fd78c 100644 --- a/.github/workflows/nodejs.yml +++ b/.github/workflows/nodejs.yml @@ -28,6 +28,7 @@ jobs: - run: npm i - name: Start Server -> Run Unit and Functional Tests + timeout-minutes: 10 env: CI: true MINIO_CI_CD: true From c9d7977d615e609d89f43c9f2a096f9ea7a0f93f Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 15:52:31 +0800 Subject: [PATCH 27/78] fix pipesetup --- src/helpers.ts | 9 ++++++--- src/minio.d.ts | 2 -- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/src/helpers.ts b/src/helpers.ts index 76b9e55f..6e43dcf7 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -309,9 +309,12 @@ export function makeDateShort(date?: Date) { * pipesetup will also make sure that error emitted at any of the upstream Stream * will be emitted at the last stream. 
This makes error handling simple */ -export function pipesetup(src: stream.Readable, dst: stream.Writable) { - src.on('error', (err: unknown) => dst.emit('error', err)) - return src.pipe(dst) +export function pipesetup(...streams: [stream.Writable, ...stream.Duplex[], stream.Readable]) { + // @ts-expect-error ts can't narrow this + return streams.reduce((src: stream.Readable, dst: stream.Writable) => { + src.on('error', (err) => dst.emit('error', err)) + return src.pipe(dst) + }) } /** diff --git a/src/minio.d.ts b/src/minio.d.ts index 7829a070..ba93cb89 100644 --- a/src/minio.d.ts +++ b/src/minio.d.ts @@ -1,7 +1,5 @@ // imported from https://github.com/DefinitelyTyped/DefinitelyTyped/blob/93cfb0ec069731dcdfc31464788613f7cddb8192/types/minio/index.d.ts -/* eslint-disable */ - import { EventEmitter } from 'node:events' import type { RequestOptions } from 'node:https' import type { Readable as ReadableStream } from 'node:stream' From a7edf5e24bea4b5e7b0a7737924f6932ef2d850f Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 15:53:06 +0800 Subject: [PATCH 28/78] dead code --- build.mjs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/build.mjs b/build.mjs index 50a6b3d2..71d43aca 100644 --- a/build.mjs +++ b/build.mjs @@ -114,10 +114,6 @@ async function main() { continue } - // if (file.path.endsWith('minio.d.ts')) { - // continue - // } - const fileContent = fs.readFileSync(file.path).toString() const mts = babel.transformSync(fileContent, { From be1c8de6094317ef69cbb7a8b1e706d8bc80abbd Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 15:58:01 +0800 Subject: [PATCH 29/78] use type narrow --- src/helpers.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/helpers.ts b/src/helpers.ts index 6e43dcf7..b243ef3c 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -789,7 +789,7 @@ export function partsRequired(size: number): number { * part is less than 5MiB, as that could fail the multipart request if * it is not the last 
part. */ -export function calculateEvenSplits(size: number, objInfo: { Start?: unknown; Bucket: string; Object: string }) { +export function calculateEvenSplits(size: number, objInfo: { Start?: number; Bucket: string; Object: string }) { if (size === 0) { return null } @@ -797,8 +797,8 @@ export function calculateEvenSplits(size: number, objInfo: { Start?: unknown; Bu const startIndexParts = new Array(reqParts) const endIndexParts = new Array(reqParts) - let start = objInfo.Start as number - if (isEmpty(objInfo.Start) || start === -1) { + let start = objInfo.Start + if (isEmpty(start) || start === -1) { start = 0 } const divisorValue = Math.trunc(size / reqParts) From e77d3e5584badbdb3b37c31a970e2a71dab95520 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 16:03:11 +0800 Subject: [PATCH 30/78] types --- src/helpers.ts | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/src/helpers.ts b/src/helpers.ts index b243ef3c..9845ec00 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -789,13 +789,20 @@ export function partsRequired(size: number): number { * part is less than 5MiB, as that could fail the multipart request if * it is not the last part. 
*/ -export function calculateEvenSplits(size: number, objInfo: { Start?: number; Bucket: string; Object: string }) { +export function calculateEvenSplits( + size: number, + objInfo: T, +): { + startIndex: number[] + objInfo: T + endIndex: number[] +} | null { if (size === 0) { return null } const reqParts = partsRequired(size) - const startIndexParts = new Array(reqParts) - const endIndexParts = new Array(reqParts) + const startIndexParts: number[] = [] + const endIndexParts: number[] = [] let start = objInfo.Start if (isEmpty(start) || start === -1) { @@ -817,8 +824,8 @@ export function calculateEvenSplits(size: number, objInfo: { Start?: number; Buc const currentEnd = currentStart + curPartSize - 1 nextStart = currentEnd + 1 - startIndexParts[i] = currentStart - endIndexParts[i] = currentEnd + startIndexParts.push(currentStart) + endIndexParts.push(currentEnd) } return { startIndex: startIndexParts, endIndex: endIndexParts, objInfo: objInfo } From be6135da5fb73f2c590c0dd3ca04fbbaa6a0967b Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 16:06:28 +0800 Subject: [PATCH 31/78] remove type without actually value --- src/minio.d.ts | 7 ------- 1 file changed, 7 deletions(-) diff --git a/src/minio.d.ts b/src/minio.d.ts index ba93cb89..53e83af3 100644 --- a/src/minio.d.ts +++ b/src/minio.d.ts @@ -653,13 +653,6 @@ export class Client { } } -export enum Policy { - NONE = 'none', - READONLY = 'readonly', - WRITEONLY = 'writeonly', - READWRITE = 'readwrite', -} - export declare class CopyConditions { setModified(date: Date): void From 4fc5d373c800aeb3c92560f475ac33c89aabd76a Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 16:10:31 +0800 Subject: [PATCH 32/78] region --- src/minio.d.ts | 14 ++------------ src/minio.js | 2 +- src/{s3-endpoints.js => s3-endpoints.ts} | 7 +++++-- 3 files changed, 8 insertions(+), 15 deletions(-) rename src/{s3-endpoints.js => s3-endpoints.ts} (90%) diff --git a/src/minio.d.ts b/src/minio.d.ts index 53e83af3..b6902e46 
100644 --- a/src/minio.d.ts +++ b/src/minio.d.ts @@ -5,22 +5,12 @@ import type { RequestOptions } from 'node:https' import type { Readable as ReadableStream } from 'node:stream' import type { CopyDestinationOptions, CopySourceOptions } from './helpers.ts' +import type { Region } from './s3-endpoints.ts' export * from './helpers.ts' +export { Region } from './s3-endpoints.ts' // Exports only from typings -export type Region = - | 'us-east-1' - | 'us-west-1' - | 'us-west-2' - | 'eu-west-1' - | 'eu-central-1' - | 'ap-southeast-1' - | 'ap-northeast-1' - | 'ap-southeast-2' - | 'sa-east-1' - | 'cn-north-1' - | string export type NotificationEvent = | 's3:ObjectCreated:*' | 's3:ObjectCreated:Put' diff --git a/src/minio.js b/src/minio.js index c0df51a1..a8835fa3 100644 --- a/src/minio.js +++ b/src/minio.js @@ -74,7 +74,7 @@ import { import { NotificationConfig, NotificationPoller } from './notification.js' import { ObjectUploader } from './object-uploader.js' import { promisify } from './promisify.js' -import { getS3Endpoint } from './s3-endpoints.js' +import { getS3Endpoint } from './s3-endpoints.ts' import { postPresignSignatureV4, presignSignatureV4, signV4 } from './signing.js' import * as transformers from './transformers.js' import { parseSelectObjectContentResponse } from './xml-parsers.js' diff --git a/src/s3-endpoints.js b/src/s3-endpoints.ts similarity index 90% rename from src/s3-endpoints.js rename to src/s3-endpoints.ts index f37d2ba7..36a424e5 100644 --- a/src/s3-endpoints.js +++ b/src/s3-endpoints.ts @@ -37,12 +37,15 @@ const awsS3Endpoint = { // Add new endpoints here. } +export type Region = keyof typeof awsS3Endpoint | string + // getS3Endpoint get relevant endpoint for the region. 
-export function getS3Endpoint(region) { +export function getS3Endpoint(region: Region): string { if (!isString(region)) { throw new TypeError(`Invalid region: ${region}`) } - var endpoint = awsS3Endpoint[region] + + const endpoint = (awsS3Endpoint as Record)[region] if (endpoint) { return endpoint } From 457594ae1f6effeff7f6cf600996bfe8e3d01cb9 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 16:11:36 +0800 Subject: [PATCH 33/78] export type --- src/minio.d.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/minio.d.ts b/src/minio.d.ts index b6902e46..2c104e88 100644 --- a/src/minio.d.ts +++ b/src/minio.d.ts @@ -8,7 +8,7 @@ import type { CopyDestinationOptions, CopySourceOptions } from './helpers.ts' import type { Region } from './s3-endpoints.ts' export * from './helpers.ts' -export { Region } from './s3-endpoints.ts' +export type { Region } from './s3-endpoints.ts' // Exports only from typings export type NotificationEvent = From 13c4ccbea6d6bbdf17aff7cb737afb5a9e19066f Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 16:36:19 +0800 Subject: [PATCH 34/78] no need to split --- src/helpers.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/helpers.ts b/src/helpers.ts index 9845ec00..9ec312cb 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -126,7 +126,8 @@ export function isValidDomain(host: string) { if (host[0] === '.') { return false } - const alphaNumerics = '`~!@#$%^&*()+={}[]|\\"\';:> Date: Fri, 5 May 2023 20:02:06 +0800 Subject: [PATCH 35/78] type check --- package.json | 2 + src/AssumeRoleProvider.js | 2 +- src/copy-source-options.ts | 112 ++++ src/extensions.js | 10 +- src/helpers.ts | 833 ++------------------------- src/internal/assert.ts | 592 +++++++++++++++++++ src/{ => internal}/type.ts | 14 +- src/minio.d.ts | 24 +- src/minio.js | 13 +- src/notification.js | 3 +- src/object-uploader.js | 2 +- src/s3-endpoints.ts | 2 +- src/select-results.ts | 58 ++ src/signing.js | 3 +- 
src/transformers.js | 2 +- src/xml-parsers.js | 13 +- tests/functional/functional-tests.js | 9 +- tests/unit/test.js | 5 +- 18 files changed, 864 insertions(+), 835 deletions(-) create mode 100644 src/copy-source-options.ts create mode 100644 src/internal/assert.ts rename src/{ => internal}/type.ts (50%) create mode 100644 src/select-results.ts diff --git a/package.json b/package.json index f4df875a..40421f9b 100644 --- a/package.json +++ b/package.json @@ -26,11 +26,13 @@ "require": "./dist/main/minio.js", "default": "./dist/esm/minio.mjs" }, + "./dist/main/internal/*": null, "./dist/main/*": { "types": "./dist/main/*", "require": "./dist/main/*", "default": null }, + "./dist/esm/internal/*": null, "./dist/esm/*": { "types": "./dist/esm/*", "import": "./dist/esm/*", diff --git a/src/AssumeRoleProvider.js b/src/AssumeRoleProvider.js index 3a3b343b..23ad7e56 100644 --- a/src/AssumeRoleProvider.js +++ b/src/AssumeRoleProvider.js @@ -4,7 +4,7 @@ import { URL, URLSearchParams } from 'node:url' import { CredentialProvider } from './CredentialProvider.js' import { Credentials } from './Credentials.js' -import { makeDateLong, parseXml, toSha256 } from './helpers.ts' +import { makeDateLong, parseXml, toSha256 } from './internal/assert.ts' import { signV4ByServiceName } from './signing.js' export class AssumeRoleProvider extends CredentialProvider { diff --git a/src/copy-source-options.ts b/src/copy-source-options.ts new file mode 100644 index 00000000..94198a74 --- /dev/null +++ b/src/copy-source-options.ts @@ -0,0 +1,112 @@ +import * as errors from './errors.ts' +import { isEmpty, isNumber, isValidBucketName, isValidObjectName } from './internal/assert.ts' +import type { Encryption, Header } from './internal/type.ts' + +export class CopySourceOptions { + public readonly Bucket: string + public readonly Object: string + public readonly VersionID: string + public MatchETag: string + private readonly NoMatchETag: string + private readonly MatchModifiedSince: string | null + 
private readonly MatchUnmodifiedSince: string | null + public readonly MatchRange: boolean + public readonly Start: number + public readonly End: number + private readonly Encryption?: Encryption + + /** + * + * @param Bucket - Bucket Name + * @param Object - Object Name + * @param VersionID - Valid versionId + * @param MatchETag - Etag to match + * @param NoMatchETag - Etag to exclude + * @param MatchModifiedSince - Modified Date of the object/part. UTC Date in string format + * @param MatchUnmodifiedSince - Modified Date of the object/part to exclude UTC Date in string format + * @param MatchRange - true or false Object range to match + * @param Start + * @param End + * @param Encryption + */ + constructor({ + Bucket = '', + Object = '', + VersionID = '', + MatchETag = '', + NoMatchETag = '', + MatchModifiedSince = null, + MatchUnmodifiedSince = null, + MatchRange = false, + Start = 0, + End = 0, + Encryption = undefined, + }: { + Bucket?: string + Object?: string + VersionID?: string + MatchETag?: string + NoMatchETag?: string + MatchModifiedSince?: string | null + MatchUnmodifiedSince?: string | null + MatchRange?: boolean + Start?: number + End?: number + Encryption?: Encryption + } = {}) { + this.Bucket = Bucket + this.Object = Object + this.VersionID = VersionID + this.MatchETag = MatchETag + this.NoMatchETag = NoMatchETag + this.MatchModifiedSince = MatchModifiedSince + this.MatchUnmodifiedSince = MatchUnmodifiedSince + this.MatchRange = MatchRange + this.Start = Start + this.End = End + this.Encryption = Encryption + } + + validate() { + if (!isValidBucketName(this.Bucket)) { + throw new errors.InvalidBucketNameError('Invalid Source bucket name: ' + this.Bucket) + } + if (!isValidObjectName(this.Object)) { + throw new errors.InvalidObjectNameError(`Invalid Source object name: ${this.Object}`) + } + if ((this.MatchRange && this.Start !== -1 && this.End !== -1 && this.Start > this.End) || this.Start < 0) { + throw new errors.InvalidObjectNameError('Source 
start must be non-negative, and start must be at most end.') + } else if ((this.MatchRange && !isNumber(this.Start)) || !isNumber(this.End)) { + throw new errors.InvalidObjectNameError( + 'MatchRange is specified. But Invalid Start and End values are specified. ', + ) + } + + return true + } + + getHeaders() { + const headerOptions: Header = {} + headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) + + if (!isEmpty(this.VersionID)) { + headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) + '?versionId=' + this.VersionID + } + + if (!isEmpty(this.MatchETag)) { + headerOptions['x-amz-copy-source-if-match'] = this.MatchETag + } + if (!isEmpty(this.NoMatchETag)) { + headerOptions['x-amz-copy-source-if-none-match'] = this.NoMatchETag + } + + if (!isEmpty(this.MatchModifiedSince)) { + headerOptions['x-amz-copy-source-if-modified-since'] = this.MatchModifiedSince + } + if (!isEmpty(this.MatchUnmodifiedSince)) { + headerOptions['x-amz-copy-source-if-unmodified-since'] = this.MatchUnmodifiedSince + } + + return headerOptions + } +} diff --git a/src/extensions.js b/src/extensions.js index a81a6222..d45ad3d6 100644 --- a/src/extensions.js +++ b/src/extensions.js @@ -17,7 +17,15 @@ import * as Stream from 'node:stream' import * as errors from './errors.ts' -import { isBoolean, isNumber, isString, isValidBucketName, isValidPrefix, pipesetup, uriEscape } from './helpers.ts' +import { + isBoolean, + isNumber, + isString, + isValidBucketName, + isValidPrefix, + pipesetup, + uriEscape, +} from './internal/assert.ts' import * as transformers from './transformers.js' export class extensions { diff --git a/src/helpers.ts b/src/helpers.ts index 9ec312cb..c2340d0a 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -1,663 +1,59 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import * as crypto from 'node:crypto' import * as fs from 'node:fs' import * as path from 'node:path' -import * as stream from 'node:stream' -import { isBrowser } from 'browser-or-node' -import { XMLParser } from 'fast-xml-parser' -import ipaddr from 'ipaddr.js' -import _ from 'lodash' -import mime from 'mime-types' import querystring from 'query-string' import * as errors from './errors.ts' -import type { Binary, Header, MetaData, Mode, ResponseHeader } from './type.ts' - -/** - * All characters in string which are NOT unreserved should be percent encoded. - * Unreserved characters are : ALPHA / DIGIT / "-" / "." / "_" / "~" - * Reference https://tools.ietf.org/html/rfc3986#section-2.2 - */ -export function uriEscape(string: string) { - return string.split('').reduce((acc: string, elem: string) => { - const buf = Buffer.from(elem) - if (buf.length === 1) { - // length 1 indicates that elem is not a unicode character. - // Check if it is an unreserved characer. - if ( - ('A' <= elem && elem <= 'Z') || - ('a' <= elem && elem <= 'z') || - ('0' <= elem && elem <= '9') || - elem === '_' || - elem === '.' || - elem === '~' || - elem === '-' - ) { - // Unreserved characer should not be encoded. - acc = acc + elem - return acc - } - } - // elem needs encoding - i.e elem should be encoded if it's not unreserved - // character or if it's a unicode character. 
- for (const char of buf) { - acc = acc + '%' + char.toString(16).toUpperCase() - } - return acc - }, '') -} - -export function uriResourceEscape(string: string) { - return uriEscape(string).replace(/%2F/g, '/') -} - -export function getScope(region: string, date: Date, serviceName = 's3') { - return `${makeDateShort(date)}/${region}/${serviceName}/aws4_request` -} - -/** - * isAmazonEndpoint - true if endpoint is 's3.amazonaws.com' or 's3.cn-north-1.amazonaws.com.cn' - */ -export function isAmazonEndpoint(endpoint: string) { - return endpoint === 's3.amazonaws.com' || endpoint === 's3.cn-north-1.amazonaws.com.cn' -} - -/** - * isVirtualHostStyle - verify if bucket name is support with virtual - * hosts. bucketNames with periods should be always treated as path - * style if the protocol is 'https:', this is due to SSL wildcard - * limitation. For all other buckets and Amazon S3 endpoint we will - * default to virtual host style. - */ -export function isVirtualHostStyle(endpoint: string, protocol: string, bucket: string, pathStyle: boolean) { - if (protocol === 'https:' && bucket.includes('.')) { - return false - } - return isAmazonEndpoint(endpoint) || !pathStyle -} - -export function isValidIP(ip: string) { - return ipaddr.isValid(ip) -} - -/** - * @returns if endpoint is valid domain. - */ -export function isValidEndpoint(endpoint: string) { - return isValidDomain(endpoint) || isValidIP(endpoint) -} - -/** - * @returns if input host is a valid domain. - */ -export function isValidDomain(host: string) { - if (!isString(host)) { - return false - } - // See RFC 1035, RFC 3696. - if (host.length === 0 || host.length > 255) { - return false - } - // Host cannot start or end with a '-' - if (host[0] === '-' || host.slice(-1) === '-') { - return false - } - // Host cannot start or end with a '_' - if (host[0] === '_' || host.slice(-1) === '_') { - return false - } - // Host cannot start with a '.' 
- if (host[0] === '.') { - return false - } - - const alphaNumerics = '`~!@#$%^&*()+={}[]|\\"\';:> 63) { - return false - } - // bucket with successive periods is invalid. - if (bucket.includes('..')) { - return false - } - // bucket cannot have ip address style. - if (/[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+/.test(bucket)) { - return false - } - // bucket should begin with alphabet/number and end with alphabet/number, - // with alphabet/number/.- in the middle. - if (/^[a-z0-9][a-z0-9.-]+[a-z0-9]$/.test(bucket)) { - return true - } - return false -} - -/** - * check if objectName is a valid object name - */ -export function isValidObjectName(objectName: unknown) { - if (!isValidPrefix(objectName)) { - return false - } - - return objectName.length !== 0 -} - -/** - * check if prefix is valid - */ -export function isValidPrefix(prefix: unknown): prefix is string { - if (!isString(prefix)) { - return false - } - if (prefix.length > 1024) { - return false - } - return true -} - -/** - * check if typeof arg number - */ -export function isNumber(arg: unknown): arg is number { - return typeof arg === 'number' -} - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -export type AnyFunction = (...args: any[]) => any - -/** - * check if typeof arg function - */ -export function isFunction(arg: unknown): arg is AnyFunction { - return typeof arg === 'function' -} - -/** - * check if typeof arg string - */ -export function isString(arg: unknown): arg is string { - return typeof arg === 'string' -} - -/** - * check if typeof arg object - */ -export function isObject(arg: unknown): arg is object { - return typeof arg === 'object' && arg !== null -} +import { + getEncryptionHeaders, + isEmpty, + isEmptyObject, + isObject, + isString, + isValidBucketName, + isValidObjectName, +} from './internal/assert.ts' +import type { Encryption, MetaData } from './internal/type.ts' +export { CopySourceOptions } from './copy-source-options.ts' +export { SelectResults } from 
'./select-results.ts' -/** - * check if object is readable stream - */ -export function isReadableStream(arg: unknown): arg is stream.Readable { - // eslint-disable-next-line @typescript-eslint/unbound-method - return isObject(arg) && isFunction((arg as stream.Readable)._read) -} - -/** - * check if arg is boolean - */ -export function isBoolean(arg: unknown): arg is boolean { - return typeof arg === 'boolean' -} - -export function isEmpty(o: unknown): o is null | undefined { - return _.isEmpty(o) -} - -export function isEmptyObject(o: Record): boolean { - return Object.values(o).filter((x) => x !== undefined).length !== 0 -} - -/** - * check if arg is a valid date - */ -export function isValidDate(arg: unknown): arg is Date { - // @ts-expect-error checknew Date(Math.NaN) - return arg instanceof Date && !isNaN(arg) -} - -/** - * Create a Date string with format: 'YYYYMMDDTHHmmss' + Z - */ -export function makeDateLong(date?: Date): string { - date = date || new Date() - - // Gives format like: '2017-08-07T16:28:59.889Z' - const s = date.toISOString() - - return s.slice(0, 4) + s.slice(5, 7) + s.slice(8, 13) + s.slice(14, 16) + s.slice(17, 19) + 'Z' -} - -/** - * Create a Date string with format: 'YYYYMMDD' - */ -export function makeDateShort(date?: Date) { - date = date || new Date() - - // Gives format like: '2017-08-07T16:28:59.889Z' - const s = date.toISOString() - - return s.slice(0, 4) + s.slice(5, 7) + s.slice(8, 10) -} - -/** - * pipesetup sets up pipe() from left to right os streams array - * pipesetup will also make sure that error emitted at any of the upstream Stream - * will be emitted at the last stream. 
This makes error handling simple - */ -export function pipesetup(...streams: [stream.Writable, ...stream.Duplex[], stream.Readable]) { - // @ts-expect-error ts can't narrow this - return streams.reduce((src: stream.Readable, dst: stream.Writable) => { - src.on('error', (err) => dst.emit('error', err)) - return src.pipe(dst) - }) -} - -/** - * return a Readable stream that emits data - */ -export function readableStream(data: unknown): stream.Readable { - const s = new stream.Readable() - s._read = () => {} - s.push(data) - s.push(null) - return s -} - -/** - * Process metadata to insert appropriate value to `content-type` attribute - */ -export function insertContentType(metaData: MetaData, filePath: string) { - // check if content-type attribute present in metaData - for (const key in metaData) { - if (key.toLowerCase() === 'content-type') { - return metaData - } - } - // if `content-type` attribute is not present in metadata, - // then infer it from the extension in filePath - const newMetadata = Object.assign({}, metaData) - newMetadata['content-type'] = probeContentType(filePath) - return newMetadata -} - -/** - * Function prepends metadata with the appropriate prefix if it is not already on - */ -export function prependXAMZMeta(metaData?: MetaData) { - if (!metaData) { - return {} - } +export const DEFAULT_REGION = 'us-east-1' - const newMetadata = Object.assign({}, metaData) - for (const [key, value] of _.entries(metaData)) { - if (!isAmzHeader(key) && !isSupportedHeader(key) && !isStorageClassHeader(key)) { - newMetadata['X-Amz-Meta-' + key] = value - delete newMetadata[key] - } +export function removeDirAndFiles(dirPath: string, removeSelf = true) { + let files + try { + files = fs.readdirSync(dirPath) + } catch (e) { + return } - return newMetadata -} - -/** - * Checks if it is a valid header according to the AmazonS3 API - */ -export function isAmzHeader(key: string) { - const temp = key.toLowerCase() - return ( - temp.startsWith('x-amz-meta-') || - temp 
=== 'x-amz-acl' || - temp.startsWith('x-amz-server-side-encryption-') || - temp === 'x-amz-server-side-encryption' - ) -} -/** - * Checks if it is a supported Header - */ -export function isSupportedHeader(key: string) { - const supported_headers = [ - 'content-type', - 'cache-control', - 'content-encoding', - 'content-disposition', - 'content-language', - 'x-amz-website-redirect-location', - ] - return supported_headers.includes(key.toLowerCase()) -} - -/** - * Checks if it is a storage header - */ -export function isStorageClassHeader(key: string) { - return key.toLowerCase() === 'x-amz-storage-class' -} - -export function extractMetadata(headers: ResponseHeader) { - const newMetadata: Record = {} - for (const [key, value] of Object.entries(headers)) { - if (isSupportedHeader(key) || isStorageClassHeader(key) || isAmzHeader(key)) { - if (key.toLowerCase().startsWith('x-amz-meta-')) { - newMetadata[key.slice(11, key.length)] = value - } else { - newMetadata[key] = value - } + for (const item of files) { + const filePath = path.join(dirPath, item) + if (fs.statSync(filePath).isFile()) { + fs.unlinkSync(filePath) + } else { + removeDirAndFiles(filePath, true) } } - return newMetadata -} - -export function getVersionId(headers: ResponseHeader = {}) { - const versionIdValue = headers['x-amz-version-id'] as string - return versionIdValue || null -} - -export function getSourceVersionId(headers: ResponseHeader = {}) { - const sourceVersionId = headers['x-amz-copy-source-version-id'] - return sourceVersionId || null -} - -export function sanitizeETag(etag = ''): string { - const replaceChars: Record = { - '"': '', - '"': '', - '"': '', - '"': '', - '"': '', - } - return etag.replace(/^("|"|")|("|"|")$/g, (m) => replaceChars[m] as string) -} - -export const RETENTION_MODES = { - GOVERNANCE: 'GOVERNANCE', - COMPLIANCE: 'COMPLIANCE', -} as const - -export const RETENTION_VALIDITY_UNITS = { - DAYS: 'Days', - YEARS: 'Years', -} as const - -export const LEGAL_HOLD_STATUS = { - 
ENABLED: 'ON', - DISABLED: 'OFF', -} as const - -function objectToBuffer(payload: Binary): Buffer { - // don't know how to write this... - return Buffer.from(payload) -} - -export function toMd5(payload: Binary): string { - let payLoadBuf: Binary = objectToBuffer(payload) - // use string from browser and buffer from nodejs - // browser support is tested only against minio server - payLoadBuf = isBrowser ? payLoadBuf.toString() : payLoadBuf - return crypto.createHash('md5').update(payLoadBuf).digest().toString('base64') -} - -export function toSha256(payload: Binary): string { - return crypto.createHash('sha256').update(payload).digest('hex') -} -/** - * toArray returns a single element array with param being the element, - * if param is just a string, and returns 'param' back if it is an array - * So, it makes sure param is always an array - */ -export function toArray(param: T | T[]): Array { - if (!Array.isArray(param)) { - return [param] as T[] + if (removeSelf) { + fs.rmdirSync(dirPath) } - return param -} - -export function sanitizeObjectKey(objectName: string): string { - // + symbol characters are not decoded as spaces in JS. so replace them first and decode to get the correct result. - const asStrName = (objectName ? objectName.toString() : '').replace(/\+/g, ' ') - return decodeURIComponent(asStrName) -} - -export const PART_CONSTRAINTS = { - // absMinPartSize - absolute minimum part size (5 MiB) - ABS_MIN_PART_SIZE: 1024 * 1024 * 5, - // MIN_PART_SIZE - minimum part size 16MiB per object after which - MIN_PART_SIZE: 1024 * 1024 * 16, - // MAX_PARTS_COUNT - maximum number of parts for a single multipart session. - MAX_PARTS_COUNT: 10000, - // MAX_PART_SIZE - maximum part size 5GiB for a single multipart upload - // operation. - MAX_PART_SIZE: 1024 * 1024 * 1024 * 5, - // MAX_SINGLE_PUT_OBJECT_SIZE - maximum size 5GiB of object per PUT - // operation. 
- MAX_SINGLE_PUT_OBJECT_SIZE: 1024 * 1024 * 1024 * 5, - // MAX_MULTIPART_PUT_OBJECT_SIZE - maximum size 5TiB of object for - // Multipart operation. - MAX_MULTIPART_PUT_OBJECT_SIZE: 1024 * 1024 * 1024 * 1024 * 5, } -export const ENCRYPTION_TYPES = { - // SSEC represents server-side-encryption with customer provided keys - SSEC: 'SSE-C', - // KMS represents server-side-encryption with managed keys - KMS: 'KMS', +export enum RETENTION_MODES { + GOVERNANCE = 'GOVERNANCE', + COMPLIANCE = 'COMPLIANCE', } -const GENERIC_SSE_HEADER = 'X-Amz-Server-Side-Encryption' - -const ENCRYPTION_HEADERS = { - // sseGenericHeader is the AWS SSE header used for SSE-S3 and SSE-KMS. - sseGenericHeader: GENERIC_SSE_HEADER, - // sseKmsKeyID is the AWS SSE-KMS key id. - sseKmsKeyID: GENERIC_SSE_HEADER + '-Aws-Kms-Key-Id', -} as const - -/** - * Return Encryption headers - * @param encConfig - * @returns an object with key value pairs that can be used in headers. - */ -function getEncryptionHeaders(encConfig: Encryption): Record { - const encType = encConfig.type - const encHeaders = {} - if (!isEmpty(encType)) { - if (encType === ENCRYPTION_TYPES.SSEC) { - return { - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - [encHeaders[ENCRYPTION_HEADERS.sseGenericHeader]]: 'AES256', - } - } else if (encType === ENCRYPTION_TYPES.KMS) { - return { - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - [ENCRYPTION_HEADERS.sseGenericHeader]: encConfig.SSEAlgorithm, - [ENCRYPTION_HEADERS.sseKmsKeyID]: encConfig.KMSMasterKeyID, - } - } - } - - return encHeaders -} - -export class CopySourceOptions { - public readonly Bucket: string - public readonly Object: string - public readonly VersionID: string - public MatchETag: string - private readonly NoMatchETag: string - private readonly MatchModifiedSince: string | null - private readonly MatchUnmodifiedSince: string | null - public readonly MatchRange: boolean - public readonly Start: number - public 
readonly End: number - private readonly Encryption?: Encryption - - /** - * - * @param Bucket - Bucket Name - * @param Object - Object Name - * @param VersionID - Valid versionId - * @param MatchETag - Etag to match - * @param NoMatchETag - Etag to exclude - * @param MatchModifiedSince - Modified Date of the object/part. UTC Date in string format - * @param MatchUnmodifiedSince - Modified Date of the object/part to exclude UTC Date in string format - * @param MatchRange - true or false Object range to match - * @param Start - * @param End - * @param Encryption - */ - constructor({ - Bucket = '', - Object = '', - VersionID = '', - MatchETag = '', - NoMatchETag = '', - MatchModifiedSince = null, - MatchUnmodifiedSince = null, - MatchRange = false, - Start = 0, - End = 0, - Encryption = undefined, - }: { - Bucket?: string - Object?: string - VersionID?: string - MatchETag?: string - NoMatchETag?: string - MatchModifiedSince?: string | null - MatchUnmodifiedSince?: string | null - MatchRange?: boolean - Start?: number - End?: number - Encryption?: Encryption - } = {}) { - this.Bucket = Bucket - this.Object = Object - this.VersionID = VersionID - this.MatchETag = MatchETag - this.NoMatchETag = NoMatchETag - this.MatchModifiedSince = MatchModifiedSince - this.MatchUnmodifiedSince = MatchUnmodifiedSince - this.MatchRange = MatchRange - this.Start = Start - this.End = End - this.Encryption = Encryption - } - - validate() { - if (!isValidBucketName(this.Bucket)) { - throw new errors.InvalidBucketNameError('Invalid Source bucket name: ' + this.Bucket) - } - if (!isValidObjectName(this.Object)) { - throw new errors.InvalidObjectNameError(`Invalid Source object name: ${this.Object}`) - } - if ((this.MatchRange && this.Start !== -1 && this.End !== -1 && this.Start > this.End) || this.Start < 0) { - throw new errors.InvalidObjectNameError('Source start must be non-negative, and start must be at most end.') - } else if ((this.MatchRange && !isNumber(this.Start)) || 
!isNumber(this.End)) { - throw new errors.InvalidObjectNameError( - 'MatchRange is specified. But Invalid Start and End values are specified. ', - ) - } - - return true - } - - getHeaders() { - const headerOptions: Header = {} - headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) - - if (!isEmpty(this.VersionID)) { - headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) + '?versionId=' + this.VersionID - } - if (!isEmpty(this.MatchETag)) { - headerOptions['x-amz-copy-source-if-match'] = this.MatchETag - } - if (!isEmpty(this.NoMatchETag)) { - headerOptions['x-amz-copy-source-if-none-match'] = this.NoMatchETag - } - - if (!isEmpty(this.MatchModifiedSince)) { - headerOptions['x-amz-copy-source-if-modified-since'] = this.MatchModifiedSince - } - if (!isEmpty(this.MatchUnmodifiedSince)) { - headerOptions['x-amz-copy-source-if-unmodified-since'] = this.MatchUnmodifiedSince - } - - return headerOptions - } +export enum RETENTION_VALIDITY_UNITS { + DAYS = 'Days', + YEARS = 'Years', } -export type Encryption = { - type: string - SSEAlgorithm?: string - KMSMasterKeyID?: string +export enum LEGAL_HOLD_STATUS { + ENABLED = 'ON', + DISABLED = 'OFF', } export class CopyDestinationOptions { @@ -668,7 +64,7 @@ export class CopyDestinationOptions { private readonly UserTags?: Record | string private readonly LegalHold?: 'on' | 'off' private readonly RetainUntilDate?: string - private readonly Mode?: Mode + private readonly Mode?: RETENTION_MODES /** * @param Bucket - Bucket name @@ -697,7 +93,7 @@ export class CopyDestinationOptions { UserTags?: Record | string LegalHold?: 'on' | 'off' RetainUntilDate?: string - Mode?: Mode + Mode?: RETENTION_MODES }) { this.Bucket = Bucket this.Object = Object @@ -773,156 +169,3 @@ export class CopyDestinationOptions { return true } } - -export function partsRequired(size: number): number { - const maxPartSize = PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE / (PART_CONSTRAINTS.MAX_PARTS_COUNT 
- 1) - let requiredPartSize = size / maxPartSize - if (size % maxPartSize > 0) { - requiredPartSize++ - } - requiredPartSize = Math.trunc(requiredPartSize) - return requiredPartSize -} - -/** - * calculateEvenSplits - computes splits for a source and returns - * start and end index slices. Splits happen evenly to be sure that no - * part is less than 5MiB, as that could fail the multipart request if - * it is not the last part. - */ -export function calculateEvenSplits( - size: number, - objInfo: T, -): { - startIndex: number[] - objInfo: T - endIndex: number[] -} | null { - if (size === 0) { - return null - } - const reqParts = partsRequired(size) - const startIndexParts: number[] = [] - const endIndexParts: number[] = [] - - let start = objInfo.Start - if (isEmpty(start) || start === -1) { - start = 0 - } - const divisorValue = Math.trunc(size / reqParts) - - const reminderValue = size % reqParts - - let nextStart = start - - for (let i = 0; i < reqParts; i++) { - let curPartSize = divisorValue - if (i < reminderValue) { - curPartSize++ - } - - const currentStart = nextStart - const currentEnd = currentStart + curPartSize - 1 - nextStart = currentEnd + 1 - - startIndexParts.push(currentStart) - endIndexParts.push(currentEnd) - } - - return { startIndex: startIndexParts, endIndex: endIndexParts, objInfo: objInfo } -} - -export function removeDirAndFiles(dirPath: string, removeSelf = true) { - let files - try { - files = fs.readdirSync(dirPath) - } catch (e) { - return - } - - for (const item of files) { - const filePath = path.join(dirPath, item) - if (fs.statSync(filePath).isFile()) { - fs.unlinkSync(filePath) - } else { - removeDirAndFiles(filePath, true) - } - } - - if (removeSelf) { - fs.rmdirSync(dirPath) - } -} - -const fxp = new XMLParser() - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -export function parseXml(xml: string): any { - let result = fxp.parse(xml) - if (result.Error) { - throw result.Error - } - - return result -} - -/** - * 
maybe this should be a generic type for Records, leave it for later refactor - */ -export class SelectResults { - private records?: unknown - private response?: unknown - private stats?: string - private progress?: unknown - - constructor({ - records, // parsed data as stream - response, // original response stream - stats, // stats as xml - progress, // stats as xml - }: { - records?: unknown - response?: unknown - stats?: string - progress?: unknown - }) { - this.records = records - this.response = response - this.stats = stats - this.progress = progress - } - - setStats(stats: string) { - this.stats = stats - } - - getStats() { - return this.stats - } - - setProgress(progress: unknown) { - this.progress = progress - } - - getProgress() { - return this.progress - } - - setResponse(response: unknown) { - this.response = response - } - - getResponse() { - return this.response - } - - setRecords(records: unknown) { - this.records = records - } - - getRecords(): unknown { - return this.records - } -} - -export const DEFAULT_REGION = 'us-east-1' diff --git a/src/internal/assert.ts b/src/internal/assert.ts new file mode 100644 index 00000000..1fd2546b --- /dev/null +++ b/src/internal/assert.ts @@ -0,0 +1,592 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import * as crypto from 'node:crypto' +import * as stream from 'node:stream' + +import { isBrowser } from 'browser-or-node' +import { XMLParser } from 'fast-xml-parser' +import ipaddr from 'ipaddr.js' +import _ from 'lodash' +import mime from 'mime-types' + +import type { Binary, Encryption, MetaData, ResponseHeader } from './type.ts' +import { ENCRYPTION_TYPES } from './type.ts' + +/** + * All characters in string which are NOT unreserved should be percent encoded. + * Unreserved characters are : ALPHA / DIGIT / "-" / "." / "_" / "~" + * Reference https://tools.ietf.org/html/rfc3986#section-2.2 + */ +export function uriEscape(string: string) { + return string.split('').reduce((acc: string, elem: string) => { + const buf = Buffer.from(elem) + if (buf.length === 1) { + // length 1 indicates that elem is not a unicode character. + // Check if it is an unreserved characer. + if ( + ('A' <= elem && elem <= 'Z') || + ('a' <= elem && elem <= 'z') || + ('0' <= elem && elem <= '9') || + elem === '_' || + elem === '.' || + elem === '~' || + elem === '-' + ) { + // Unreserved characer should not be encoded. + acc = acc + elem + return acc + } + } + // elem needs encoding - i.e elem should be encoded if it's not unreserved + // character or if it's a unicode character. 
+ for (const char of buf) { + acc = acc + '%' + char.toString(16).toUpperCase() + } + return acc + }, '') +} + +export function uriResourceEscape(string: string) { + return uriEscape(string).replace(/%2F/g, '/') +} + +export function getScope(region: string, date: Date, serviceName = 's3') { + return `${makeDateShort(date)}/${region}/${serviceName}/aws4_request` +} + +/** + * isAmazonEndpoint - true if endpoint is 's3.amazonaws.com' or 's3.cn-north-1.amazonaws.com.cn' + */ +export function isAmazonEndpoint(endpoint: string) { + return endpoint === 's3.amazonaws.com' || endpoint === 's3.cn-north-1.amazonaws.com.cn' +} + +/** + * isVirtualHostStyle - verify if bucket name is support with virtual + * hosts. bucketNames with periods should be always treated as path + * style if the protocol is 'https:', this is due to SSL wildcard + * limitation. For all other buckets and Amazon S3 endpoint we will + * default to virtual host style. + */ +export function isVirtualHostStyle(endpoint: string, protocol: string, bucket: string, pathStyle: boolean) { + if (protocol === 'https:' && bucket.includes('.')) { + return false + } + return isAmazonEndpoint(endpoint) || !pathStyle +} + +export function isValidIP(ip: string) { + return ipaddr.isValid(ip) +} + +/** + * @returns if endpoint is valid domain. + */ +export function isValidEndpoint(endpoint: string) { + return isValidDomain(endpoint) || isValidIP(endpoint) +} + +/** + * @returns if input host is a valid domain. + */ +export function isValidDomain(host: string) { + if (!isString(host)) { + return false + } + // See RFC 1035, RFC 3696. + if (host.length === 0 || host.length > 255) { + return false + } + // Host cannot start or end with a '-' + if (host[0] === '-' || host.slice(-1) === '-') { + return false + } + // Host cannot start or end with a '_' + if (host[0] === '_' || host.slice(-1) === '_') { + return false + } + // Host cannot start with a '.' 
+ if (host[0] === '.') { + return false + } + + const alphaNumerics = '`~!@#$%^&*()+={}[]|\\"\';:> 63) { + return false + } + // bucket with successive periods is invalid. + if (bucket.includes('..')) { + return false + } + // bucket cannot have ip address style. + if (/[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+/.test(bucket)) { + return false + } + // bucket should begin with alphabet/number and end with alphabet/number, + // with alphabet/number/.- in the middle. + if (/^[a-z0-9][a-z0-9.-]+[a-z0-9]$/.test(bucket)) { + return true + } + return false +} + +/** + * check if objectName is a valid object name + */ +export function isValidObjectName(objectName: unknown) { + if (!isValidPrefix(objectName)) { + return false + } + + return objectName.length !== 0 +} + +/** + * check if prefix is valid + */ +export function isValidPrefix(prefix: unknown): prefix is string { + if (!isString(prefix)) { + return false + } + if (prefix.length > 1024) { + return false + } + return true +} + +/** + * check if typeof arg number + */ +export function isNumber(arg: unknown): arg is number { + return typeof arg === 'number' +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export type AnyFunction = (...args: any[]) => any + +/** + * check if typeof arg function + */ +export function isFunction(arg: unknown): arg is AnyFunction { + return typeof arg === 'function' +} + +/** + * check if typeof arg string + */ +export function isString(arg: unknown): arg is string { + return typeof arg === 'string' +} + +/** + * check if typeof arg object + */ +export function isObject(arg: unknown): arg is object { + return typeof arg === 'object' && arg !== null +} + +/** + * check if object is readable stream + */ +export function isReadableStream(arg: unknown): arg is stream.Readable { + // eslint-disable-next-line @typescript-eslint/unbound-method + return isObject(arg) && isFunction((arg as stream.Readable)._read) +} + +/** + * check if arg is boolean + */ +export function isBoolean(arg: 
unknown): arg is boolean { + return typeof arg === 'boolean' +} + +export function isEmpty(o: unknown): o is null | undefined { + return _.isEmpty(o) +} + +export function isEmptyObject(o: Record): boolean { + return Object.values(o).filter((x) => x !== undefined).length !== 0 +} + +/** + * check if arg is a valid date + */ +export function isValidDate(arg: unknown): arg is Date { + // @ts-expect-error checknew Date(Math.NaN) + return arg instanceof Date && !isNaN(arg) +} + +/** + * Create a Date string with format: 'YYYYMMDDTHHmmss' + Z + */ +export function makeDateLong(date?: Date): string { + date = date || new Date() + + // Gives format like: '2017-08-07T16:28:59.889Z' + const s = date.toISOString() + + return s.slice(0, 4) + s.slice(5, 7) + s.slice(8, 13) + s.slice(14, 16) + s.slice(17, 19) + 'Z' +} + +/** + * Create a Date string with format: 'YYYYMMDD' + */ +export function makeDateShort(date?: Date) { + date = date || new Date() + + // Gives format like: '2017-08-07T16:28:59.889Z' + const s = date.toISOString() + + return s.slice(0, 4) + s.slice(5, 7) + s.slice(8, 10) +} + +/** + * pipesetup sets up pipe() from left to right os streams array + * pipesetup will also make sure that error emitted at any of the upstream Stream + * will be emitted at the last stream. 
This makes error handling simple + */ +export function pipesetup(...streams: [stream.Writable, ...stream.Duplex[], stream.Readable]) { + // @ts-expect-error ts can't narrow this + return streams.reduce((src: stream.Readable, dst: stream.Writable) => { + src.on('error', (err) => dst.emit('error', err)) + return src.pipe(dst) + }) +} + +/** + * return a Readable stream that emits data + */ +export function readableStream(data: unknown): stream.Readable { + const s = new stream.Readable() + s._read = () => {} + s.push(data) + s.push(null) + return s +} + +/** + * Process metadata to insert appropriate value to `content-type` attribute + */ +export function insertContentType(metaData: MetaData, filePath: string) { + // check if content-type attribute present in metaData + for (const key in metaData) { + if (key.toLowerCase() === 'content-type') { + return metaData + } + } + // if `content-type` attribute is not present in metadata, + // then infer it from the extension in filePath + const newMetadata = Object.assign({}, metaData) + newMetadata['content-type'] = probeContentType(filePath) + return newMetadata +} + +/** + * Function prepends metadata with the appropriate prefix if it is not already on + */ +export function prependXAMZMeta(metaData?: MetaData) { + if (!metaData) { + return {} + } + + const newMetadata = Object.assign({}, metaData) + for (const [key, value] of _.entries(metaData)) { + if (!isAmzHeader(key) && !isSupportedHeader(key) && !isStorageClassHeader(key)) { + newMetadata['X-Amz-Meta-' + key] = value + delete newMetadata[key] + } + } + return newMetadata +} + +/** + * Checks if it is a valid header according to the AmazonS3 API + */ +export function isAmzHeader(key: string) { + const temp = key.toLowerCase() + return ( + temp.startsWith('x-amz-meta-') || + temp === 'x-amz-acl' || + temp.startsWith('x-amz-server-side-encryption-') || + temp === 'x-amz-server-side-encryption' + ) +} + +/** + * Checks if it is a supported Header + */ +export function 
isSupportedHeader(key: string) { + const supported_headers = [ + 'content-type', + 'cache-control', + 'content-encoding', + 'content-disposition', + 'content-language', + 'x-amz-website-redirect-location', + ] + return supported_headers.includes(key.toLowerCase()) +} + +/** + * Checks if it is a storage header + */ +export function isStorageClassHeader(key: string) { + return key.toLowerCase() === 'x-amz-storage-class' +} + +export function extractMetadata(headers: ResponseHeader) { + const newMetadata: Record = {} + for (const [key, value] of Object.entries(headers)) { + if (isSupportedHeader(key) || isStorageClassHeader(key) || isAmzHeader(key)) { + if (key.toLowerCase().startsWith('x-amz-meta-')) { + newMetadata[key.slice(11, key.length)] = value + } else { + newMetadata[key] = value + } + } + } + return newMetadata +} + +export function getVersionId(headers: ResponseHeader = {}) { + const versionIdValue = headers['x-amz-version-id'] as string + return versionIdValue || null +} + +export function getSourceVersionId(headers: ResponseHeader = {}) { + const sourceVersionId = headers['x-amz-copy-source-version-id'] + return sourceVersionId || null +} + +export function sanitizeETag(etag = ''): string { + const replaceChars: Record = { + '"': '', + '"': '', + '"': '', + '"': '', + '"': '', + } + return etag.replace(/^("|"|")|("|"|")$/g, (m) => replaceChars[m] as string) +} + +function objectToBuffer(payload: Binary): Buffer { + // don't know how to write this... + return Buffer.from(payload) +} + +export function toMd5(payload: Binary): string { + let payLoadBuf: Binary = objectToBuffer(payload) + // use string from browser and buffer from nodejs + // browser support is tested only against minio server + payLoadBuf = isBrowser ? 
payLoadBuf.toString() : payLoadBuf + return crypto.createHash('md5').update(payLoadBuf).digest().toString('base64') +} + +export function toSha256(payload: Binary): string { + return crypto.createHash('sha256').update(payload).digest('hex') +} + +/** + * toArray returns a single element array with param being the element, + * if param is just a string, and returns 'param' back if it is an array + * So, it makes sure param is always an array + */ +export function toArray(param: T | T[]): Array { + if (!Array.isArray(param)) { + return [param] as T[] + } + return param +} + +export function sanitizeObjectKey(objectName: string): string { + // + symbol characters are not decoded as spaces in JS. so replace them first and decode to get the correct result. + const asStrName = (objectName ? objectName.toString() : '').replace(/\+/g, ' ') + return decodeURIComponent(asStrName) +} + +export const PART_CONSTRAINTS = { + // absMinPartSize - absolute minimum part size (5 MiB) + ABS_MIN_PART_SIZE: 1024 * 1024 * 5, + // MIN_PART_SIZE - minimum part size 16MiB per object after which + MIN_PART_SIZE: 1024 * 1024 * 16, + // MAX_PARTS_COUNT - maximum number of parts for a single multipart session. + MAX_PARTS_COUNT: 10000, + // MAX_PART_SIZE - maximum part size 5GiB for a single multipart upload + // operation. + MAX_PART_SIZE: 1024 * 1024 * 1024 * 5, + // MAX_SINGLE_PUT_OBJECT_SIZE - maximum size 5GiB of object per PUT + // operation. + MAX_SINGLE_PUT_OBJECT_SIZE: 1024 * 1024 * 1024 * 5, + // MAX_MULTIPART_PUT_OBJECT_SIZE - maximum size 5TiB of object for + // Multipart operation. + MAX_MULTIPART_PUT_OBJECT_SIZE: 1024 * 1024 * 1024 * 1024 * 5, +} + +const GENERIC_SSE_HEADER = 'X-Amz-Server-Side-Encryption' + +const ENCRYPTION_HEADERS = { + // sseGenericHeader is the AWS SSE header used for SSE-S3 and SSE-KMS. + sseGenericHeader: GENERIC_SSE_HEADER, + // sseKmsKeyID is the AWS SSE-KMS key id. 
+ sseKmsKeyID: GENERIC_SSE_HEADER + '-Aws-Kms-Key-Id', +} as const + +/** + * Return Encryption headers + * @param encConfig + * @returns an object with key value pairs that can be used in headers. + */ +export function getEncryptionHeaders(encConfig: Encryption): Record { + const encType = encConfig.type + const encHeaders = {} + if (!isEmpty(encType)) { + if (encType === ENCRYPTION_TYPES.SSEC) { + return { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + [encHeaders[ENCRYPTION_HEADERS.sseGenericHeader]]: 'AES256', + } + } else if (encType === ENCRYPTION_TYPES.KMS) { + return { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + [ENCRYPTION_HEADERS.sseGenericHeader]: encConfig.SSEAlgorithm, + [ENCRYPTION_HEADERS.sseKmsKeyID]: encConfig.KMSMasterKeyID, + } + } + } + + return encHeaders +} + +export function partsRequired(size: number): number { + const maxPartSize = PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE / (PART_CONSTRAINTS.MAX_PARTS_COUNT - 1) + let requiredPartSize = size / maxPartSize + if (size % maxPartSize > 0) { + requiredPartSize++ + } + requiredPartSize = Math.trunc(requiredPartSize) + return requiredPartSize +} + +/** + * calculateEvenSplits - computes splits for a source and returns + * start and end index slices. Splits happen evenly to be sure that no + * part is less than 5MiB, as that could fail the multipart request if + * it is not the last part. 
+ */ +export function calculateEvenSplits( + size: number, + objInfo: T, +): { + startIndex: number[] + objInfo: T + endIndex: number[] +} | null { + if (size === 0) { + return null + } + const reqParts = partsRequired(size) + const startIndexParts: number[] = [] + const endIndexParts: number[] = [] + + let start = objInfo.Start + if (isEmpty(start) || start === -1) { + start = 0 + } + const divisorValue = Math.trunc(size / reqParts) + + const reminderValue = size % reqParts + + let nextStart = start + + for (let i = 0; i < reqParts; i++) { + let curPartSize = divisorValue + if (i < reminderValue) { + curPartSize++ + } + + const currentStart = nextStart + const currentEnd = currentStart + curPartSize - 1 + nextStart = currentEnd + 1 + + startIndexParts.push(currentStart) + endIndexParts.push(currentEnd) + } + + return { startIndex: startIndexParts, endIndex: endIndexParts, objInfo: objInfo } +} + +const fxp = new XMLParser() + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export function parseXml(xml: string): any { + let result = fxp.parse(xml) + if (result.Error) { + throw result.Error + } + + return result +} diff --git a/src/type.ts b/src/internal/type.ts similarity index 50% rename from src/type.ts rename to src/internal/type.ts index 39917a44..62bb9bad 100644 --- a/src/type.ts +++ b/src/internal/type.ts @@ -1,9 +1,19 @@ export type Binary = string | Buffer -export type Mode = 'COMPLIANCE' | 'GOVERNANCE' - // nodejs IncomingHttpHeaders is Record, but it's actually this: export type ResponseHeader = Record export type MetaData = Record export type Header = Record +export type Encryption = { + type: string + SSEAlgorithm?: string + KMSMasterKeyID?: string +} + +export enum ENCRYPTION_TYPES { + // SSEC represents server-side-encryption with customer provided keys + SSEC = 'SSE-C', + // KMS represents server-side-encryption with managed keys + KMS = 'KMS', +} diff --git a/src/minio.d.ts b/src/minio.d.ts index 2c104e88..0c0d6144 100644 --- 
a/src/minio.d.ts +++ b/src/minio.d.ts @@ -9,6 +9,7 @@ import type { Region } from './s3-endpoints.ts' export * from './helpers.ts' export type { Region } from './s3-endpoints.ts' +import type { LEGAL_HOLD_STATUS, RETENTION_MODES, RETENTION_VALIDITY_UNITS } from './helpers.ts' // Exports only from typings export type NotificationEvent = @@ -29,9 +30,22 @@ export type NotificationEvent = | 's3:Replication:OperationReplicatedAfterThreshold' | 's3:Replication:OperationNotTracked' | string -export type Mode = 'COMPLIANCE' | 'GOVERNANCE' -export type LockUnit = 'Days' | 'Years' -export type LegalHoldStatus = 'ON' | 'OFF' + +/** + * @deprecated keep for backward compatible + */ +export type Mode = RETENTION_MODES + +/** + * @deprecated keep for backward compatible + */ +export type LockUnit = RETENTION_VALIDITY_UNITS + +/** + * @deprecated keep for backward compatible + */ +export type LegalHoldStatus = LEGAL_HOLD_STATUS + export type NoResultCallback = (error: Error | null) => void export type ResultCallback = (error: Error | null, result: T) => void export type VersioningConfig = Record @@ -141,8 +155,8 @@ export interface LifecycleRule { } export interface LockConfig { - mode: Mode - unit: LockUnit + mode: RETENTION_MODES + unit: RETENTION_VALIDITY_UNITS validity: number } diff --git a/src/minio.js b/src/minio.js index a8835fa3..35a146e0 100644 --- a/src/minio.js +++ b/src/minio.js @@ -33,10 +33,15 @@ import { CredentialProvider } from './CredentialProvider.js' import * as errors from './errors.ts' import { extensions } from './extensions.js' import { - calculateEvenSplits, CopyDestinationOptions, CopySourceOptions, DEFAULT_REGION, + LEGAL_HOLD_STATUS, + RETENTION_MODES, + RETENTION_VALIDITY_UNITS, +} from './helpers.ts' +import { + calculateEvenSplits, extractMetadata, getScope, getSourceVersionId, @@ -56,21 +61,18 @@ import { isValidPort, isValidPrefix, isVirtualHostStyle, - LEGAL_HOLD_STATUS, makeDateLong, PART_CONSTRAINTS, partsRequired, pipesetup, prependXAMZMeta, 
readableStream, - RETENTION_MODES, - RETENTION_VALIDITY_UNITS, sanitizeETag, toMd5, toSha256, uriEscape, uriResourceEscape, -} from './helpers.ts' +} from './internal/assert.ts' import { NotificationConfig, NotificationPoller } from './notification.js' import { ObjectUploader } from './object-uploader.js' import { promisify } from './promisify.js' @@ -78,7 +80,6 @@ import { getS3Endpoint } from './s3-endpoints.ts' import { postPresignSignatureV4, presignSignatureV4, signV4 } from './signing.js' import * as transformers from './transformers.js' import { parseSelectObjectContentResponse } from './xml-parsers.js' - // will be replaced by bundler const Package = { version: process.env.MINIO_JS_PACKAGE_VERSION || 'development' } diff --git a/src/notification.js b/src/notification.js index 9ceb2b03..ef5db128 100644 --- a/src/notification.js +++ b/src/notification.js @@ -16,7 +16,8 @@ import { EventEmitter } from 'node:events' -import { DEFAULT_REGION, pipesetup, uriEscape } from './helpers.ts' +import { DEFAULT_REGION } from './helpers.ts' +import { pipesetup, uriEscape } from './internal/assert.ts' import * as transformers from './transformers.js' // Notification config - array of target configs. diff --git a/src/object-uploader.js b/src/object-uploader.js index 7c3dcaff..5220987a 100644 --- a/src/object-uploader.js +++ b/src/object-uploader.js @@ -19,7 +19,7 @@ import { Transform } from 'node:stream' import * as querystring from 'query-string' -import { getVersionId, sanitizeETag } from './helpers.ts' +import { getVersionId, sanitizeETag } from './internal/assert.ts' // We extend Transform because Writable does not implement ._flush(). export class ObjectUploader extends Transform { diff --git a/src/s3-endpoints.ts b/src/s3-endpoints.ts index 36a424e5..615b069a 100644 --- a/src/s3-endpoints.ts +++ b/src/s3-endpoints.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import { isString } from './helpers.ts' +import { isString } from './internal/assert.ts' // List of currently supported endpoints. const awsS3Endpoint = { diff --git a/src/select-results.ts b/src/select-results.ts new file mode 100644 index 00000000..eab0176c --- /dev/null +++ b/src/select-results.ts @@ -0,0 +1,58 @@ +/** + * maybe this should be a generic type for Records, leave it for later refactor + */ +export class SelectResults { + private records?: unknown + private response?: unknown + private stats?: string + private progress?: unknown + + constructor({ + records, // parsed data as stream + response, // original response stream + stats, // stats as xml + progress, // stats as xml + }: { + records?: unknown + response?: unknown + stats?: string + progress?: unknown + }) { + this.records = records + this.response = response + this.stats = stats + this.progress = progress + } + + setStats(stats: string) { + this.stats = stats + } + + getStats() { + return this.stats + } + + setProgress(progress: unknown) { + this.progress = progress + } + + getProgress() { + return this.progress + } + + setResponse(response: unknown) { + this.response = response + } + + getResponse() { + return this.response + } + + setRecords(records: unknown) { + this.records = records + } + + getRecords(): unknown { + return this.records + } +} diff --git a/src/signing.js b/src/signing.js index aaf0251a..41f23015 100644 --- a/src/signing.js +++ b/src/signing.js @@ -19,7 +19,7 @@ import * as Crypto from 'node:crypto' import _ from 'lodash' import * as errors from './errors.ts' -import { getScope, isNumber, isObject, isString, makeDateLong, makeDateShort, uriEscape } from './helpers.ts' +import { getScope, isNumber, isObject, isString, makeDateLong, makeDateShort, uriEscape } from './internal/assert.ts' const signV4Algorithm = 'AWS4-HMAC-SHA256' @@ -232,6 +232,7 @@ export function signV4(request, accessKey, secretKey, region, requestDate, servi export function 
signV4ByServiceName(request, accessKey, secretKey, region, requestDate, serviceName = 's3') { return signV4(request, accessKey, secretKey, region, requestDate, serviceName) } + // returns a presigned URL string export function presignSignatureV4(request, accessKey, secretKey, sessionToken, region, requestDate, expires) { if (!isObject(request)) { diff --git a/src/transformers.js b/src/transformers.js index 34ddee3d..dd76b002 100644 --- a/src/transformers.js +++ b/src/transformers.js @@ -21,7 +21,7 @@ import _ from 'lodash' import Through2 from 'through2' import * as errors from './errors.ts' -import { isFunction } from './helpers.ts' +import { isFunction } from './internal/assert.ts' import * as xmlParsers from './xml-parsers.js' // getConcater returns a stream that concatenates the input and emits diff --git a/src/xml-parsers.js b/src/xml-parsers.js index 022eed15..5d136a4e 100644 --- a/src/xml-parsers.js +++ b/src/xml-parsers.js @@ -19,16 +19,9 @@ import { XMLParser } from 'fast-xml-parser' import _ from 'lodash' import * as errors from './errors.ts' -import { - isObject, - parseXml, - readableStream, - RETENTION_VALIDITY_UNITS, - sanitizeETag, - sanitizeObjectKey, - SelectResults, - toArray, -} from './helpers.ts' +import { RETENTION_VALIDITY_UNITS } from './helpers.ts' +import { isObject, parseXml, readableStream, sanitizeETag, sanitizeObjectKey, toArray } from './internal/assert.ts' +import { SelectResults } from './select-results.ts' // Parse XML and return information as Javascript types const fxp = new XMLParser() diff --git a/tests/functional/functional-tests.js b/tests/functional/functional-tests.js index cb1ca681..06be7856 100644 --- a/tests/functional/functional-tests.js +++ b/tests/functional/functional-tests.js @@ -31,13 +31,8 @@ import superagent from 'superagent' import * as uuid from 'uuid' import { AssumeRoleProvider } from '../../src/AssumeRoleProvider.js' -import { - CopyDestinationOptions, - CopySourceOptions, - DEFAULT_REGION, - getVersionId, 
- removeDirAndFiles, -} from '../../src/helpers.ts' +import { CopyDestinationOptions, CopySourceOptions, DEFAULT_REGION, removeDirAndFiles } from '../../src/helpers.ts' +import { getVersionId } from '../../src/internal/assert.ts' import * as minio from '../../src/minio.js' const assert = chai.assert diff --git a/tests/unit/test.js b/tests/unit/test.js index d3a94ee5..6dc767fa 100644 --- a/tests/unit/test.js +++ b/tests/unit/test.js @@ -19,16 +19,15 @@ import * as Stream from 'node:stream' import { assert } from 'chai' import Nock from 'nock' +import { CopyDestinationOptions, CopySourceOptions } from '../../src/helpers.ts' import { calculateEvenSplits, - CopyDestinationOptions, - CopySourceOptions, isValidEndpoint, isValidIP, makeDateLong, makeDateShort, partsRequired, -} from '../../src/helpers.ts' +} from '../../src/internal/assert.ts' import * as Minio from '../../src/minio.js' const Package = { version: 'development' } From 9959a78fd48e65c9d904ea09ddeac10e3980d3a0 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 20:05:42 +0800 Subject: [PATCH 36/78] use enum --- src/helpers.ts | 18 +++--------------- src/internal/type.ts | 23 +++++++++++++++++++++-- src/minio.d.ts | 5 ++++- src/minio.js | 10 ++-------- src/xml-parsers.js | 2 +- 5 files changed, 31 insertions(+), 27 deletions(-) diff --git a/src/helpers.ts b/src/helpers.ts index c2340d0a..cd25dc78 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -14,7 +14,10 @@ import { isValidObjectName, } from './internal/assert.ts' import type { Encryption, MetaData } from './internal/type.ts' +import { RETENTION_MODES } from './internal/type.ts' + export { CopySourceOptions } from './copy-source-options.ts' +export { ENCRYPTION_TYPES, LEGAL_HOLD_STATUS, RETENTION_MODES, RETENTION_VALIDITY_UNITS } from './internal/type.ts' export { SelectResults } from './select-results.ts' export const DEFAULT_REGION = 'us-east-1' @@ -41,21 +44,6 @@ export function removeDirAndFiles(dirPath: string, removeSelf = true) { } } 
-export enum RETENTION_MODES { - GOVERNANCE = 'GOVERNANCE', - COMPLIANCE = 'COMPLIANCE', -} - -export enum RETENTION_VALIDITY_UNITS { - DAYS = 'Days', - YEARS = 'Years', -} - -export enum LEGAL_HOLD_STATUS { - ENABLED = 'ON', - DISABLED = 'OFF', -} - export class CopyDestinationOptions { public readonly Bucket: string public readonly Object: string diff --git a/src/internal/type.ts b/src/internal/type.ts index 62bb9bad..7634ba1e 100644 --- a/src/internal/type.ts +++ b/src/internal/type.ts @@ -12,8 +12,27 @@ export type Encryption = { } export enum ENCRYPTION_TYPES { - // SSEC represents server-side-encryption with customer provided keys + /** + * SSEC represents server-side-encryption with customer provided keys + */ SSEC = 'SSE-C', - // KMS represents server-side-encryption with managed keys + /** + * KMS represents server-side-encryption with managed keys + */ KMS = 'KMS', } + +export enum RETENTION_MODES { + GOVERNANCE = 'GOVERNANCE', + COMPLIANCE = 'COMPLIANCE', +} + +export enum RETENTION_VALIDITY_UNITS { + DAYS = 'Days', + YEARS = 'Years', +} + +export enum LEGAL_HOLD_STATUS { + ENABLED = 'ON', + DISABLED = 'OFF', +} diff --git a/src/minio.d.ts b/src/minio.d.ts index 0c0d6144..3156dd06 100644 --- a/src/minio.d.ts +++ b/src/minio.d.ts @@ -9,7 +9,7 @@ import type { Region } from './s3-endpoints.ts' export * from './helpers.ts' export type { Region } from './s3-endpoints.ts' -import type { LEGAL_HOLD_STATUS, RETENTION_MODES, RETENTION_VALIDITY_UNITS } from './helpers.ts' +import type { LEGAL_HOLD_STATUS, RETENTION_MODES, RETENTION_VALIDITY_UNITS } from './internal/type.ts' // Exports only from typings export type NotificationEvent = @@ -729,3 +729,6 @@ export declare const ObjectRemovedAll: NotificationEvent // s3:ObjectRemoved:* export declare const ObjectRemovedDelete: NotificationEvent // s3:ObjectRemoved:Delete export declare const ObjectRemovedDeleteMarkerCreated: NotificationEvent // s3:ObjectRemoved:DeleteMarkerCreated export declare const 
ObjectReducedRedundancyLostObject: NotificationEvent // s3:ReducedRedundancyLostObject +export { LEGAL_HOLD_STATUS } from './internal/type.ts' +export { RETENTION_VALIDITY_UNITS } from './internal/type.ts' +export { RETENTION_MODES } from './internal/type.ts' diff --git a/src/minio.js b/src/minio.js index 35a146e0..c3fe3717 100644 --- a/src/minio.js +++ b/src/minio.js @@ -32,14 +32,7 @@ import xml2js from 'xml2js' import { CredentialProvider } from './CredentialProvider.js' import * as errors from './errors.ts' import { extensions } from './extensions.js' -import { - CopyDestinationOptions, - CopySourceOptions, - DEFAULT_REGION, - LEGAL_HOLD_STATUS, - RETENTION_MODES, - RETENTION_VALIDITY_UNITS, -} from './helpers.ts' +import { CopyDestinationOptions, CopySourceOptions, DEFAULT_REGION } from './helpers.ts' import { calculateEvenSplits, extractMetadata, @@ -73,6 +66,7 @@ import { uriEscape, uriResourceEscape, } from './internal/assert.ts' +import { LEGAL_HOLD_STATUS, RETENTION_MODES, RETENTION_VALIDITY_UNITS } from './internal/type.ts' import { NotificationConfig, NotificationPoller } from './notification.js' import { ObjectUploader } from './object-uploader.js' import { promisify } from './promisify.js' diff --git a/src/xml-parsers.js b/src/xml-parsers.js index 5d136a4e..b01f54e6 100644 --- a/src/xml-parsers.js +++ b/src/xml-parsers.js @@ -19,8 +19,8 @@ import { XMLParser } from 'fast-xml-parser' import _ from 'lodash' import * as errors from './errors.ts' -import { RETENTION_VALIDITY_UNITS } from './helpers.ts' import { isObject, parseXml, readableStream, sanitizeETag, sanitizeObjectKey, toArray } from './internal/assert.ts' +import { RETENTION_VALIDITY_UNITS } from './internal/type.ts' import { SelectResults } from './select-results.ts' // Parse XML and return information as Javascript types From ebb50aa590f30fc9ddf60be606810bd28adf5cf0 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 20:07:17 +0800 Subject: [PATCH 37/78] filename --- 
src/AssumeRoleProvider.js | 2 +- src/copy-source-options.ts | 2 +- src/extensions.js | 2 +- src/helpers.ts | 2 +- src/internal/{assert.ts => helper.ts} | 0 src/minio.js | 2 +- src/notification.js | 2 +- src/object-uploader.js | 2 +- src/s3-endpoints.ts | 2 +- src/signing.js | 2 +- src/transformers.js | 2 +- src/xml-parsers.js | 2 +- tests/functional/functional-tests.js | 2 +- tests/unit/test.js | 2 +- 14 files changed, 13 insertions(+), 13 deletions(-) rename src/internal/{assert.ts => helper.ts} (100%) diff --git a/src/AssumeRoleProvider.js b/src/AssumeRoleProvider.js index 23ad7e56..3033aadf 100644 --- a/src/AssumeRoleProvider.js +++ b/src/AssumeRoleProvider.js @@ -4,7 +4,7 @@ import { URL, URLSearchParams } from 'node:url' import { CredentialProvider } from './CredentialProvider.js' import { Credentials } from './Credentials.js' -import { makeDateLong, parseXml, toSha256 } from './internal/assert.ts' +import { makeDateLong, parseXml, toSha256 } from './internal/helper.ts' import { signV4ByServiceName } from './signing.js' export class AssumeRoleProvider extends CredentialProvider { diff --git a/src/copy-source-options.ts b/src/copy-source-options.ts index 94198a74..2d36a3a0 100644 --- a/src/copy-source-options.ts +++ b/src/copy-source-options.ts @@ -1,5 +1,5 @@ import * as errors from './errors.ts' -import { isEmpty, isNumber, isValidBucketName, isValidObjectName } from './internal/assert.ts' +import { isEmpty, isNumber, isValidBucketName, isValidObjectName } from './internal/helper.ts' import type { Encryption, Header } from './internal/type.ts' export class CopySourceOptions { diff --git a/src/extensions.js b/src/extensions.js index d45ad3d6..70caa471 100644 --- a/src/extensions.js +++ b/src/extensions.js @@ -25,7 +25,7 @@ import { isValidPrefix, pipesetup, uriEscape, -} from './internal/assert.ts' +} from './internal/helper.ts' import * as transformers from './transformers.js' export class extensions { diff --git a/src/helpers.ts b/src/helpers.ts index 
cd25dc78..3699bccb 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -12,7 +12,7 @@ import { isString, isValidBucketName, isValidObjectName, -} from './internal/assert.ts' +} from './internal/helper.ts' import type { Encryption, MetaData } from './internal/type.ts' import { RETENTION_MODES } from './internal/type.ts' diff --git a/src/internal/assert.ts b/src/internal/helper.ts similarity index 100% rename from src/internal/assert.ts rename to src/internal/helper.ts diff --git a/src/minio.js b/src/minio.js index c3fe3717..47d11ee4 100644 --- a/src/minio.js +++ b/src/minio.js @@ -65,7 +65,7 @@ import { toSha256, uriEscape, uriResourceEscape, -} from './internal/assert.ts' +} from './internal/helper.ts' import { LEGAL_HOLD_STATUS, RETENTION_MODES, RETENTION_VALIDITY_UNITS } from './internal/type.ts' import { NotificationConfig, NotificationPoller } from './notification.js' import { ObjectUploader } from './object-uploader.js' diff --git a/src/notification.js b/src/notification.js index ef5db128..2c3b45c3 100644 --- a/src/notification.js +++ b/src/notification.js @@ -17,7 +17,7 @@ import { EventEmitter } from 'node:events' import { DEFAULT_REGION } from './helpers.ts' -import { pipesetup, uriEscape } from './internal/assert.ts' +import { pipesetup, uriEscape } from './internal/helper.ts' import * as transformers from './transformers.js' // Notification config - array of target configs. diff --git a/src/object-uploader.js b/src/object-uploader.js index 5220987a..b14f75c1 100644 --- a/src/object-uploader.js +++ b/src/object-uploader.js @@ -19,7 +19,7 @@ import { Transform } from 'node:stream' import * as querystring from 'query-string' -import { getVersionId, sanitizeETag } from './internal/assert.ts' +import { getVersionId, sanitizeETag } from './internal/helper.ts' // We extend Transform because Writable does not implement ._flush(). 
export class ObjectUploader extends Transform { diff --git a/src/s3-endpoints.ts b/src/s3-endpoints.ts index 615b069a..18b171ef 100644 --- a/src/s3-endpoints.ts +++ b/src/s3-endpoints.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { isString } from './internal/assert.ts' +import { isString } from './internal/helper.ts' // List of currently supported endpoints. const awsS3Endpoint = { diff --git a/src/signing.js b/src/signing.js index 41f23015..c7bf5238 100644 --- a/src/signing.js +++ b/src/signing.js @@ -19,7 +19,7 @@ import * as Crypto from 'node:crypto' import _ from 'lodash' import * as errors from './errors.ts' -import { getScope, isNumber, isObject, isString, makeDateLong, makeDateShort, uriEscape } from './internal/assert.ts' +import { getScope, isNumber, isObject, isString, makeDateLong, makeDateShort, uriEscape } from './internal/helper.ts' const signV4Algorithm = 'AWS4-HMAC-SHA256' diff --git a/src/transformers.js b/src/transformers.js index dd76b002..bf4d0db7 100644 --- a/src/transformers.js +++ b/src/transformers.js @@ -21,7 +21,7 @@ import _ from 'lodash' import Through2 from 'through2' import * as errors from './errors.ts' -import { isFunction } from './internal/assert.ts' +import { isFunction } from './internal/helper.ts' import * as xmlParsers from './xml-parsers.js' // getConcater returns a stream that concatenates the input and emits diff --git a/src/xml-parsers.js b/src/xml-parsers.js index b01f54e6..c842f712 100644 --- a/src/xml-parsers.js +++ b/src/xml-parsers.js @@ -19,7 +19,7 @@ import { XMLParser } from 'fast-xml-parser' import _ from 'lodash' import * as errors from './errors.ts' -import { isObject, parseXml, readableStream, sanitizeETag, sanitizeObjectKey, toArray } from './internal/assert.ts' +import { isObject, parseXml, readableStream, sanitizeETag, sanitizeObjectKey, toArray } from './internal/helper.ts' import { RETENTION_VALIDITY_UNITS } from './internal/type.ts' import { SelectResults } from './select-results.ts' 
diff --git a/tests/functional/functional-tests.js b/tests/functional/functional-tests.js index 06be7856..5c52d42b 100644 --- a/tests/functional/functional-tests.js +++ b/tests/functional/functional-tests.js @@ -32,7 +32,7 @@ import * as uuid from 'uuid' import { AssumeRoleProvider } from '../../src/AssumeRoleProvider.js' import { CopyDestinationOptions, CopySourceOptions, DEFAULT_REGION, removeDirAndFiles } from '../../src/helpers.ts' -import { getVersionId } from '../../src/internal/assert.ts' +import { getVersionId } from '../../src/internal/helper.ts' import * as minio from '../../src/minio.js' const assert = chai.assert diff --git a/tests/unit/test.js b/tests/unit/test.js index 6dc767fa..02f1cc35 100644 --- a/tests/unit/test.js +++ b/tests/unit/test.js @@ -27,7 +27,7 @@ import { makeDateLong, makeDateShort, partsRequired, -} from '../../src/internal/assert.ts' +} from '../../src/internal/helper.ts' import * as Minio from '../../src/minio.js' const Package = { version: 'development' } From a45f7e0d707cfca4000a4b507cd1a45dc10e3b4f Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 20:17:53 +0800 Subject: [PATCH 38/78] diff --- src/minio.d.ts | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/src/minio.d.ts b/src/minio.d.ts index 3156dd06..9802572d 100644 --- a/src/minio.d.ts +++ b/src/minio.d.ts @@ -4,12 +4,17 @@ import { EventEmitter } from 'node:events' import type { RequestOptions } from 'node:https' import type { Readable as ReadableStream } from 'node:stream' -import type { CopyDestinationOptions, CopySourceOptions } from './helpers.ts' +import type { + CopyDestinationOptions, + CopySourceOptions, + LEGAL_HOLD_STATUS, + RETENTION_MODES, + RETENTION_VALIDITY_UNITS, +} from './helpers.ts' import type { Region } from './s3-endpoints.ts' export * from './helpers.ts' export type { Region } from './s3-endpoints.ts' -import type { LEGAL_HOLD_STATUS, RETENTION_MODES, RETENTION_VALIDITY_UNITS } from './internal/type.ts' // Exports 
only from typings export type NotificationEvent = @@ -729,6 +734,3 @@ export declare const ObjectRemovedAll: NotificationEvent // s3:ObjectRemoved:* export declare const ObjectRemovedDelete: NotificationEvent // s3:ObjectRemoved:Delete export declare const ObjectRemovedDeleteMarkerCreated: NotificationEvent // s3:ObjectRemoved:DeleteMarkerCreated export declare const ObjectReducedRedundancyLostObject: NotificationEvent // s3:ReducedRedundancyLostObject -export { LEGAL_HOLD_STATUS } from './internal/type.ts' -export { RETENTION_VALIDITY_UNITS } from './internal/type.ts' -export { RETENTION_MODES } from './internal/type.ts' From ca38e3877ee1de9ffdc23f5039bd1e2f4ee81fd7 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 20:19:21 +0800 Subject: [PATCH 39/78] deprecated type --- src/minio.d.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/minio.d.ts b/src/minio.d.ts index 9802572d..6d105474 100644 --- a/src/minio.d.ts +++ b/src/minio.d.ts @@ -37,7 +37,7 @@ export type NotificationEvent = | string /** - * @deprecated keep for backward compatible + * @deprecated keep for backward compatible, use `RETENTION_MODES` instead */ export type Mode = RETENTION_MODES @@ -184,7 +184,7 @@ export interface ReplicationConfig { export interface RetentionOptions { versionId: string - mode?: Mode + mode?: RETENTION_MODES retainUntilDate?: IsoDate governanceBypass?: boolean } From deb282ea2325b6719f68802979432219fce783a9 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 20:24:25 +0800 Subject: [PATCH 40/78] helper file --- src/copy-source-options.ts | 112 ------------------------ src/helpers.ts | 173 ++++++++++++++++++++++++++++++++++++- src/select-results.ts | 58 ------------- 3 files changed, 170 insertions(+), 173 deletions(-) delete mode 100644 src/copy-source-options.ts delete mode 100644 src/select-results.ts diff --git a/src/copy-source-options.ts b/src/copy-source-options.ts deleted file mode 100644 index 2d36a3a0..00000000 --- 
a/src/copy-source-options.ts +++ /dev/null @@ -1,112 +0,0 @@ -import * as errors from './errors.ts' -import { isEmpty, isNumber, isValidBucketName, isValidObjectName } from './internal/helper.ts' -import type { Encryption, Header } from './internal/type.ts' - -export class CopySourceOptions { - public readonly Bucket: string - public readonly Object: string - public readonly VersionID: string - public MatchETag: string - private readonly NoMatchETag: string - private readonly MatchModifiedSince: string | null - private readonly MatchUnmodifiedSince: string | null - public readonly MatchRange: boolean - public readonly Start: number - public readonly End: number - private readonly Encryption?: Encryption - - /** - * - * @param Bucket - Bucket Name - * @param Object - Object Name - * @param VersionID - Valid versionId - * @param MatchETag - Etag to match - * @param NoMatchETag - Etag to exclude - * @param MatchModifiedSince - Modified Date of the object/part. UTC Date in string format - * @param MatchUnmodifiedSince - Modified Date of the object/part to exclude UTC Date in string format - * @param MatchRange - true or false Object range to match - * @param Start - * @param End - * @param Encryption - */ - constructor({ - Bucket = '', - Object = '', - VersionID = '', - MatchETag = '', - NoMatchETag = '', - MatchModifiedSince = null, - MatchUnmodifiedSince = null, - MatchRange = false, - Start = 0, - End = 0, - Encryption = undefined, - }: { - Bucket?: string - Object?: string - VersionID?: string - MatchETag?: string - NoMatchETag?: string - MatchModifiedSince?: string | null - MatchUnmodifiedSince?: string | null - MatchRange?: boolean - Start?: number - End?: number - Encryption?: Encryption - } = {}) { - this.Bucket = Bucket - this.Object = Object - this.VersionID = VersionID - this.MatchETag = MatchETag - this.NoMatchETag = NoMatchETag - this.MatchModifiedSince = MatchModifiedSince - this.MatchUnmodifiedSince = MatchUnmodifiedSince - this.MatchRange = MatchRange - 
this.Start = Start - this.End = End - this.Encryption = Encryption - } - - validate() { - if (!isValidBucketName(this.Bucket)) { - throw new errors.InvalidBucketNameError('Invalid Source bucket name: ' + this.Bucket) - } - if (!isValidObjectName(this.Object)) { - throw new errors.InvalidObjectNameError(`Invalid Source object name: ${this.Object}`) - } - if ((this.MatchRange && this.Start !== -1 && this.End !== -1 && this.Start > this.End) || this.Start < 0) { - throw new errors.InvalidObjectNameError('Source start must be non-negative, and start must be at most end.') - } else if ((this.MatchRange && !isNumber(this.Start)) || !isNumber(this.End)) { - throw new errors.InvalidObjectNameError( - 'MatchRange is specified. But Invalid Start and End values are specified. ', - ) - } - - return true - } - - getHeaders() { - const headerOptions: Header = {} - headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) - - if (!isEmpty(this.VersionID)) { - headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) + '?versionId=' + this.VersionID - } - - if (!isEmpty(this.MatchETag)) { - headerOptions['x-amz-copy-source-if-match'] = this.MatchETag - } - if (!isEmpty(this.NoMatchETag)) { - headerOptions['x-amz-copy-source-if-none-match'] = this.NoMatchETag - } - - if (!isEmpty(this.MatchModifiedSince)) { - headerOptions['x-amz-copy-source-if-modified-since'] = this.MatchModifiedSince - } - if (!isEmpty(this.MatchUnmodifiedSince)) { - headerOptions['x-amz-copy-source-if-unmodified-since'] = this.MatchUnmodifiedSince - } - - return headerOptions - } -} diff --git a/src/helpers.ts b/src/helpers.ts index 3699bccb..c633be4d 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -8,20 +8,128 @@ import { getEncryptionHeaders, isEmpty, isEmptyObject, + isNumber, isObject, isString, isValidBucketName, isValidObjectName, } from './internal/helper.ts' -import type { Encryption, MetaData } from './internal/type.ts' +import type { Encryption, Header, 
MetaData } from './internal/type.ts' import { RETENTION_MODES } from './internal/type.ts' -export { CopySourceOptions } from './copy-source-options.ts' export { ENCRYPTION_TYPES, LEGAL_HOLD_STATUS, RETENTION_MODES, RETENTION_VALIDITY_UNITS } from './internal/type.ts' -export { SelectResults } from './select-results.ts' export const DEFAULT_REGION = 'us-east-1' +export class CopySourceOptions { + public readonly Bucket: string + public readonly Object: string + public readonly VersionID: string + public MatchETag: string + private readonly NoMatchETag: string + private readonly MatchModifiedSince: string | null + private readonly MatchUnmodifiedSince: string | null + public readonly MatchRange: boolean + public readonly Start: number + public readonly End: number + private readonly Encryption?: Encryption + + /** + * + * @param Bucket - Bucket Name + * @param Object - Object Name + * @param VersionID - Valid versionId + * @param MatchETag - Etag to match + * @param NoMatchETag - Etag to exclude + * @param MatchModifiedSince - Modified Date of the object/part. 
UTC Date in string format + * @param MatchUnmodifiedSince - Modified Date of the object/part to exclude UTC Date in string format + * @param MatchRange - true or false Object range to match + * @param Start + * @param End + * @param Encryption + */ + constructor({ + Bucket = '', + Object = '', + VersionID = '', + MatchETag = '', + NoMatchETag = '', + MatchModifiedSince = null, + MatchUnmodifiedSince = null, + MatchRange = false, + Start = 0, + End = 0, + Encryption = undefined, + }: { + Bucket?: string + Object?: string + VersionID?: string + MatchETag?: string + NoMatchETag?: string + MatchModifiedSince?: string | null + MatchUnmodifiedSince?: string | null + MatchRange?: boolean + Start?: number + End?: number + Encryption?: Encryption + } = {}) { + this.Bucket = Bucket + this.Object = Object + this.VersionID = VersionID + this.MatchETag = MatchETag + this.NoMatchETag = NoMatchETag + this.MatchModifiedSince = MatchModifiedSince + this.MatchUnmodifiedSince = MatchUnmodifiedSince + this.MatchRange = MatchRange + this.Start = Start + this.End = End + this.Encryption = Encryption + } + + validate() { + if (!isValidBucketName(this.Bucket)) { + throw new errors.InvalidBucketNameError('Invalid Source bucket name: ' + this.Bucket) + } + if (!isValidObjectName(this.Object)) { + throw new errors.InvalidObjectNameError(`Invalid Source object name: ${this.Object}`) + } + if ((this.MatchRange && this.Start !== -1 && this.End !== -1 && this.Start > this.End) || this.Start < 0) { + throw new errors.InvalidObjectNameError('Source start must be non-negative, and start must be at most end.') + } else if ((this.MatchRange && !isNumber(this.Start)) || !isNumber(this.End)) { + throw new errors.InvalidObjectNameError( + 'MatchRange is specified. But Invalid Start and End values are specified. 
', + ) + } + + return true + } + + getHeaders() { + const headerOptions: Header = {} + headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) + + if (!isEmpty(this.VersionID)) { + headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) + '?versionId=' + this.VersionID + } + + if (!isEmpty(this.MatchETag)) { + headerOptions['x-amz-copy-source-if-match'] = this.MatchETag + } + if (!isEmpty(this.NoMatchETag)) { + headerOptions['x-amz-copy-source-if-none-match'] = this.NoMatchETag + } + + if (!isEmpty(this.MatchModifiedSince)) { + headerOptions['x-amz-copy-source-if-modified-since'] = this.MatchModifiedSince + } + if (!isEmpty(this.MatchUnmodifiedSince)) { + headerOptions['x-amz-copy-source-if-unmodified-since'] = this.MatchUnmodifiedSince + } + + return headerOptions + } +} + export function removeDirAndFiles(dirPath: string, removeSelf = true) { let files try { @@ -157,3 +265,62 @@ export class CopyDestinationOptions { return true } } + +/** + * maybe this should be a generic type for Records, leave it for later refactor + */ +export class SelectResults { + private records?: unknown + private response?: unknown + private stats?: string + private progress?: unknown + + constructor({ + records, // parsed data as stream + response, // original response stream + stats, // stats as xml + progress, // stats as xml + }: { + records?: unknown + response?: unknown + stats?: string + progress?: unknown + }) { + this.records = records + this.response = response + this.stats = stats + this.progress = progress + } + + setStats(stats: string) { + this.stats = stats + } + + getStats() { + return this.stats + } + + setProgress(progress: unknown) { + this.progress = progress + } + + getProgress() { + return this.progress + } + + setResponse(response: unknown) { + this.response = response + } + + getResponse() { + return this.response + } + + setRecords(records: unknown) { + this.records = records + } + + getRecords(): unknown { + 
return this.records + } +} diff --git a/src/select-results.ts b/src/select-results.ts deleted file mode 100644 index eab0176c..00000000 --- a/src/select-results.ts +++ /dev/null @@ -1,58 +0,0 @@ -/** - * maybe this should be a generic type for Records, leave it for later refactor - */ -export class SelectResults { - private records?: unknown - private response?: unknown - private stats?: string - private progress?: unknown - - constructor({ - records, // parsed data as stream - response, // original response stream - stats, // stats as xml - progress, // stats as xml - }: { - records?: unknown - response?: unknown - stats?: string - progress?: unknown - }) { - this.records = records - this.response = response - this.stats = stats - this.progress = progress - } - - setStats(stats: string) { - this.stats = stats - } - - getStats() { - return this.stats - } - - setProgress(progress: unknown) { - this.progress = progress - } - - getProgress() { - return this.progress - } - - setResponse(response: unknown) { - this.response = response - } - - getResponse() { - return this.response - } - - setRecords(records: unknown) { - this.records = records - } - - getRecords(): unknown { - return this.records - } -} From 767303e980f3476c3d26360cc6a4383ef551bb7d Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 20:27:14 +0800 Subject: [PATCH 41/78] fix --- src/xml-parsers.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/xml-parsers.js b/src/xml-parsers.js index c842f712..994af5a7 100644 --- a/src/xml-parsers.js +++ b/src/xml-parsers.js @@ -19,9 +19,9 @@ import { XMLParser } from 'fast-xml-parser' import _ from 'lodash' import * as errors from './errors.ts' +import { SelectResults } from './helpers.ts' import { isObject, parseXml, readableStream, sanitizeETag, sanitizeObjectKey, toArray } from './internal/helper.ts' import { RETENTION_VALIDITY_UNITS } from './internal/type.ts' -import { SelectResults } from './select-results.ts' // Parse XML and 
return information as Javascript types const fxp = new XMLParser() From f9fe211baa234119f32deb126e00576445474c3c Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 22:43:00 +0800 Subject: [PATCH 42/78] Update src/helpers.ts Co-authored-by: Reinaldy Rafli --- src/helpers.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/helpers.ts b/src/helpers.ts index c633be4d..92ad7ec3 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -97,7 +97,7 @@ export class CopySourceOptions { throw new errors.InvalidObjectNameError('Source start must be non-negative, and start must be at most end.') } else if ((this.MatchRange && !isNumber(this.Start)) || !isNumber(this.End)) { throw new errors.InvalidObjectNameError( - 'MatchRange is specified. But Invalid Start and End values are specified. ', + 'MatchRange is specified. But Invalid Start and End values are specified.', ) } From 4f3629a5584a8cf61847c7e887524d22d0ef1b03 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 5 May 2023 23:38:08 +0800 Subject: [PATCH 43/78] perfer const in typescript --- .eslintrc.js | 6 ++++++ src/internal/helper.ts | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/.eslintrc.js b/.eslintrc.js index 97a72d1c..ea112c91 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -110,6 +110,12 @@ module.exports = { 'import/no-commonjs': 'error', }, }, + { + files: ['./src/**/*.ts'], + rules: { + 'prefer-const': 'error', + }, + }, { files: ['./tests/**/*'], rules: { diff --git a/src/internal/helper.ts b/src/internal/helper.ts index 1fd2546b..fa72a1e6 100644 --- a/src/internal/helper.ts +++ b/src/internal/helper.ts @@ -583,7 +583,7 @@ const fxp = new XMLParser() // eslint-disable-next-line @typescript-eslint/no-explicit-any export function parseXml(xml: string): any { - let result = fxp.parse(xml) + const result = fxp.parse(xml) if (result.Error) { throw result.Error } From 2815daaf117d8e8d16f0cb72e8c70bff498711dc Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sat, 6 May 2023 
02:37:39 +0800 Subject: [PATCH 44/78] fix pipesetup type --- src/internal/helper.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/internal/helper.ts b/src/internal/helper.ts index fa72a1e6..349303d4 100644 --- a/src/internal/helper.ts +++ b/src/internal/helper.ts @@ -307,7 +307,7 @@ export function makeDateShort(date?: Date) { * pipesetup will also make sure that error emitted at any of the upstream Stream * will be emitted at the last stream. This makes error handling simple */ -export function pipesetup(...streams: [stream.Writable, ...stream.Duplex[], stream.Readable]) { +export function pipesetup(...streams: [stream.Readable, ...stream.Duplex[], stream.Writable]) { // @ts-expect-error ts can't narrow this return streams.reduce((src: stream.Readable, dst: stream.Writable) => { src.on('error', (err) => dst.emit('error', err)) From 3f4cdf714d3fdccc357a739a35f7cafcb958c262 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sat, 6 May 2023 19:25:39 +0800 Subject: [PATCH 45/78] type rename --- src/helpers.ts | 6 +++--- src/internal/helper.ts | 6 +++--- src/internal/type.ts | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/helpers.ts b/src/helpers.ts index 92ad7ec3..46cab2b9 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -14,7 +14,7 @@ import { isValidBucketName, isValidObjectName, } from './internal/helper.ts' -import type { Encryption, Header, MetaData } from './internal/type.ts' +import type { Encryption, Header, ObjectMetaData } from './internal/type.ts' import { RETENTION_MODES } from './internal/type.ts' export { ENCRYPTION_TYPES, LEGAL_HOLD_STATUS, RETENTION_MODES, RETENTION_VALIDITY_UNITS } from './internal/type.ts' @@ -156,7 +156,7 @@ export class CopyDestinationOptions { public readonly Bucket: string public readonly Object: string private readonly Encryption?: Encryption - private readonly UserMetadata?: MetaData + private readonly UserMetadata?: ObjectMetaData private readonly UserTags?: Record | string private 
readonly LegalHold?: 'on' | 'off' private readonly RetainUntilDate?: string @@ -185,7 +185,7 @@ export class CopyDestinationOptions { Bucket: string Object: string Encryption?: Encryption - UserMetadata?: MetaData + UserMetadata?: ObjectMetaData UserTags?: Record | string LegalHold?: 'on' | 'off' RetainUntilDate?: string diff --git a/src/internal/helper.ts b/src/internal/helper.ts index 349303d4..01fdf5bb 100644 --- a/src/internal/helper.ts +++ b/src/internal/helper.ts @@ -23,7 +23,7 @@ import ipaddr from 'ipaddr.js' import _ from 'lodash' import mime from 'mime-types' -import type { Binary, Encryption, MetaData, ResponseHeader } from './type.ts' +import type { Binary, Encryption, ObjectMetaData, ResponseHeader } from './type.ts' import { ENCRYPTION_TYPES } from './type.ts' /** @@ -329,7 +329,7 @@ export function readableStream(data: unknown): stream.Readable { /** * Process metadata to insert appropriate value to `content-type` attribute */ -export function insertContentType(metaData: MetaData, filePath: string) { +export function insertContentType(metaData: ObjectMetaData, filePath: string) { // check if content-type attribute present in metaData for (const key in metaData) { if (key.toLowerCase() === 'content-type') { @@ -346,7 +346,7 @@ export function insertContentType(metaData: MetaData, filePath: string) { /** * Function prepends metadata with the appropriate prefix if it is not already on */ -export function prependXAMZMeta(metaData?: MetaData) { +export function prependXAMZMeta(metaData?: ObjectMetaData) { if (!metaData) { return {} } diff --git a/src/internal/type.ts b/src/internal/type.ts index 7634ba1e..94b4b3ff 100644 --- a/src/internal/type.ts +++ b/src/internal/type.ts @@ -3,7 +3,7 @@ export type Binary = string | Buffer // nodejs IncomingHttpHeaders is Record, but it's actually this: export type ResponseHeader = Record -export type MetaData = Record +export type ObjectMetaData = Record export type Header = Record export type Encryption = { type: 
string From 53d161f911b096aea946c1b83a79e01b1d38061a Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sat, 6 May 2023 19:37:57 +0800 Subject: [PATCH 46/78] fix type --- src/internal/helper.ts | 2 +- src/minio.d.ts | 13 +++++++------ 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/src/internal/helper.ts b/src/internal/helper.ts index 01fdf5bb..ad234e1d 100644 --- a/src/internal/helper.ts +++ b/src/internal/helper.ts @@ -21,7 +21,7 @@ import { isBrowser } from 'browser-or-node' import { XMLParser } from 'fast-xml-parser' import ipaddr from 'ipaddr.js' import _ from 'lodash' -import mime from 'mime-types' +import * as mime from 'mime-types' import type { Binary, Encryption, ObjectMetaData, ResponseHeader } from './type.ts' import { ENCRYPTION_TYPES } from './type.ts' diff --git a/src/minio.d.ts b/src/minio.d.ts index 6d105474..bdd1601d 100644 --- a/src/minio.d.ts +++ b/src/minio.d.ts @@ -4,6 +4,7 @@ import { EventEmitter } from 'node:events' import type { RequestOptions } from 'node:https' import type { Readable as ReadableStream } from 'node:stream' +import type { Transport } from '../dist/esm/internal/type.mjs' import type { CopyDestinationOptions, CopySourceOptions, @@ -70,7 +71,7 @@ export interface ClientOptions { useSSL?: boolean | undefined port?: number | undefined region?: Region | undefined - transport?: any + transport?: Transport sessionToken?: string | undefined partSize?: number | undefined pathStyle?: boolean | undefined @@ -191,7 +192,7 @@ export interface RetentionOptions { export interface LegalHoldOptions { versionId: string - status: LegalHoldStatus + status: LEGAL_HOLD_STATUS } export interface InputSerialization { @@ -243,13 +244,13 @@ export interface SourceObjectStats { // No need to export this. But without it - linter error. 
export class TargetConfig { - setId(id: any): void + setId(id: unknown): void - addEvent(newEvent: any): void + addEvent(newEvent: unknown): void - addFilterSuffix(suffix: any): void + addFilterSuffix(suffix: string): void - addFilterPrefix(prefix: any): void + addFilterPrefix(prefix: string): void } export interface MakeBucketOpt { From 9fe8a3ded63cb37d5ff58f1525109692332958a5 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sat, 6 May 2023 19:45:21 +0800 Subject: [PATCH 47/78] fix type --- src/internal/type.ts | 5 +++++ src/minio.d.ts | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/src/internal/type.ts b/src/internal/type.ts index 94b4b3ff..3b1e9a42 100644 --- a/src/internal/type.ts +++ b/src/internal/type.ts @@ -5,6 +5,9 @@ export type ResponseHeader = Record export type ObjectMetaData = Record export type Header = Record +import type * as http from 'node:http' +import type * as https from 'node:https' + export type Encryption = { type: string SSEAlgorithm?: string @@ -36,3 +39,5 @@ export enum LEGAL_HOLD_STATUS { ENABLED = 'ON', DISABLED = 'OFF', } + +export type Transport = typeof http | typeof https diff --git a/src/minio.d.ts b/src/minio.d.ts index bdd1601d..00c473c5 100644 --- a/src/minio.d.ts +++ b/src/minio.d.ts @@ -4,7 +4,6 @@ import { EventEmitter } from 'node:events' import type { RequestOptions } from 'node:https' import type { Readable as ReadableStream } from 'node:stream' -import type { Transport } from '../dist/esm/internal/type.mjs' import type { CopyDestinationOptions, CopySourceOptions, @@ -12,6 +11,7 @@ import type { RETENTION_MODES, RETENTION_VALIDITY_UNITS, } from './helpers.ts' +import type { Transport } from './internal/type.ts' import type { Region } from './s3-endpoints.ts' export * from './helpers.ts' From d6cb2195220d8ec2b43d1b604548702e23afefbb Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sat, 6 May 2023 19:47:53 +0800 Subject: [PATCH 48/78] ci --- .github/workflows/nodejs-windows.yml | 2 +- .github/workflows/nodejs.yml 
| 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/nodejs-windows.yml b/.github/workflows/nodejs-windows.yml index e7170b8c..da6b8504 100644 --- a/.github/workflows/nodejs-windows.yml +++ b/.github/workflows/nodejs-windows.yml @@ -15,7 +15,7 @@ jobs: strategy: max-parallel: 3 matrix: - node_version: [12.x, 14.x, 16.x, 17.x, 18.x, 20.x] + node_version: [10.x, 12.x, 14.x, 16.x, 18.x, 20.x] os: [windows-latest] steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/nodejs.yml b/.github/workflows/nodejs.yml index 709fd78c..093e19e5 100644 --- a/.github/workflows/nodejs.yml +++ b/.github/workflows/nodejs.yml @@ -15,7 +15,7 @@ jobs: strategy: max-parallel: 3 matrix: - node_version: [12.x, 14.x, 16.x, 17.x, 18.x, 20.x] + node_version: [10.x, 12.x, 14.x, 16.x, 18.x, 20.x] os: [ubuntu-latest] steps: - uses: actions/checkout@v3 From 7d0f34cce71ad77b4b5385b5adb69c89e56215d6 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sat, 6 May 2023 20:29:26 +0800 Subject: [PATCH 49/78] chore: upgrade deps --- package-lock.json | 16 ++++------------ package.json | 3 +-- 2 files changed, 5 insertions(+), 14 deletions(-) diff --git a/package-lock.json b/package-lock.json index 33593aef..4b175f96 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11,7 +11,7 @@ "dependencies": { "async": "^3.1.0", "block-stream2": "^2.0.0", - "browser-or-node": "^1.3.0", + "browser-or-node": "^2.1.1", "buffer-crc32": "^0.2.13", "fast-xml-parser": "^4.1.3", "ipaddr.js": "^2.0.1", @@ -33,7 +33,6 @@ "@babel/register": "^7.21.0", "@nodelib/fs.walk": "^1.2.8", "@types/async": "^3.2.18", - "@types/browser-or-node": "^1.3.0", "@types/lodash": "^4.14.192", "@types/mime-types": "^2.1.1", "@types/node": "^18.15.11", @@ -2169,12 +2168,6 @@ "integrity": "sha512-/IsuXp3B9R//uRLi40VlIYoMp7OzhkunPe2fDu7jGfQXI9y3CDCx6FC4juRLSqrpmLst3vgsiK536AAGJFl4Ww==", "dev": true }, - "node_modules/@types/browser-or-node": { - "version": "1.3.0", - "resolved": 
"https://registry.npmjs.org/@types/browser-or-node/-/browser-or-node-1.3.0.tgz", - "integrity": "sha512-MVetr65IR7RdJbUxVHsaPFaXAO8fi89zv1g8L/mHygh1Q7xnnK02XZLwfMh57FOpTO6gtnagoPMQ/UOFfctXRQ==", - "dev": true - }, "node_modules/@types/json-schema": { "version": "7.0.11", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", @@ -2942,10 +2935,9 @@ } }, "node_modules/browser-or-node": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/browser-or-node/-/browser-or-node-1.3.0.tgz", - "integrity": "sha512-0F2z/VSnLbmEeBcUrSuDH5l0HxTXdQQzLjkmBR4cYfvg1zJrKSlmIZFqyFR8oX0NrwPhy3c3HQ6i3OxMbew4Tg==", - "license": "MIT" + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/browser-or-node/-/browser-or-node-2.1.1.tgz", + "integrity": "sha512-8CVjaLJGuSKMVTxJ2DpBl5XnlNDiT4cQFeuCJJrvJmts9YrTZDizTX7PjC2s6W4x+MBGZeEY6dGMrF04/6Hgqg==" }, "node_modules/browser-stdout": { "version": "1.3.1", diff --git a/package.json b/package.json index 40421f9b..90f2bc60 100644 --- a/package.json +++ b/package.json @@ -87,7 +87,7 @@ "dependencies": { "async": "^3.1.0", "block-stream2": "^2.0.0", - "browser-or-node": "^1.3.0", + "browser-or-node": "^2.1.1", "buffer-crc32": "^0.2.13", "fast-xml-parser": "^4.1.3", "ipaddr.js": "^2.0.1", @@ -109,7 +109,6 @@ "@babel/register": "^7.21.0", "@nodelib/fs.walk": "^1.2.8", "@types/async": "^3.2.18", - "@types/browser-or-node": "^1.3.0", "@types/lodash": "^4.14.192", "@types/mime-types": "^2.1.1", "@types/node": "^18.15.11", From 5aebc4d6f77b97693d2d0727864eef9c8cf0211f Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sat, 6 May 2023 22:13:22 +0800 Subject: [PATCH 50/78] upgrade old deps --- package-lock.json | 670 +++++++++++++++++++++++----------------------- package.json | 57 ++-- 2 files changed, 368 insertions(+), 359 deletions(-) diff --git a/package-lock.json b/package-lock.json index 4b175f96..f76ce5dd 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,58 +9,58 @@ "version": "7.1.1", 
"license": "Apache-2.0", "dependencies": { - "async": "^3.1.0", - "block-stream2": "^2.0.0", + "async": "^3.2.4", + "block-stream2": "^2.1.0", "browser-or-node": "^2.1.1", "buffer-crc32": "^0.2.13", - "fast-xml-parser": "^4.1.3", + "fast-xml-parser": "^4.2.2", "ipaddr.js": "^2.0.1", "json-stream": "^1.0.0", "lodash": "^4.17.21", - "mime-types": "^2.1.14", - "mkdirp": "^0.5.1", - "query-string": "^7.1.1", - "through2": "^3.0.1", + "mime-types": "^2.1.35", + "mkdirp": "^0.5.6", + "query-string": "^7.1.3", + "through2": "^4.0.2", "web-encoding": "^1.1.5", - "xml": "^1.0.0", + "xml": "^1.0.1", "xml2js": "^0.5.0" }, "devDependencies": { - "@babel/core": "^7.12.10", + "@babel/core": "^7.21.8", "@babel/plugin-transform-modules-commonjs": "^7.21.5", - "@babel/preset-env": "^7.12.10", - "@babel/preset-typescript": "^7.21.4", + "@babel/preset-env": "^7.21.5", + "@babel/preset-typescript": "^7.21.5", "@babel/register": "^7.21.0", "@nodelib/fs.walk": "^1.2.8", - "@types/async": "^3.2.18", - "@types/lodash": "^4.14.192", + "@types/async": "^3.2.20", + "@types/lodash": "^4.14.194", "@types/mime-types": "^2.1.1", - "@types/node": "^18.15.11", + "@types/node": "^20.1.0", "@types/xml": "^1.0.8", "@types/xml2js": "^0.4.11", - "@typescript-eslint/eslint-plugin": "^5.57.1", - "@typescript-eslint/parser": "^5.57.1", - "@upleveled/babel-plugin-remove-node-prefix": "^1.0.4", + "@typescript-eslint/eslint-plugin": "^5.59.2", + "@typescript-eslint/parser": "^5.59.2", + "@upleveled/babel-plugin-remove-node-prefix": "^1.0.5", "babel-plugin-replace-import-extension": "^1.1.3", "babel-plugin-transform-replace-expressions": "^0.2.0", - "chai": "^4.2.0", + "chai": "^4.3.7", "dotenv": "^16.0.3", - "eslint": "^8.37.0", + "eslint": "^8.40.0", "eslint-config-prettier": "^8.8.0", "eslint-import-resolver-typescript": "^3.5.5", "eslint-plugin-import": "^2.27.5", "eslint-plugin-simple-import-sort": "^10.0.0", - "eslint-plugin-unicorn": "^46.0.0", + "eslint-plugin-unicorn": "^47.0.0", 
"eslint-plugin-unused-imports": "^2.0.0", "husky": "^8.0.3", - "lint-staged": "^13.2.1", - "mocha": "^9.2.0", - "mocha-steps": "^1.1.0", - "nock": "^13.2.2", - "prettier": "^2.8.7", - "source-map-support": "^0.5.13", - "split-file": "^2.2.2", - "superagent": "^5.1.0", + "lint-staged": "^13.2.2", + "mocha": "^9.2.2", + "mocha-steps": "^1.3.0", + "nock": "^13.3.1", + "prettier": "^2.8.8", + "source-map-support": "^0.5.21", + "split-file": "^2.3.0", + "superagent": "^8.0.1", "typescript": "^5.0.4", "uuid": "^9.0.0" }, @@ -96,31 +96,30 @@ } }, "node_modules/@babel/compat-data": { - "version": "7.21.4", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.21.4.tgz", - "integrity": "sha512-/DYyDpeCfaVinT40FPGdkkb+lYSKvsVuMjDAG7jPOWWiM1ibOaB9CXJAlc4d1QpP/U2q2P9jbrSlClKSErd55g==", + "version": "7.21.7", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.21.7.tgz", + "integrity": "sha512-KYMqFYTaenzMK4yUtf4EW9wc4N9ef80FsbMtkwool5zpwl4YrT1SdWYSTRcT94KO4hannogdS+LxY7L+arP3gA==", "dev": true, - "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.21.4", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.21.4.tgz", - "integrity": "sha512-qt/YV149Jman/6AfmlxJ04LMIu8bMoyl3RB91yTFrxQmgbrSvQMy7cI8Q62FHx1t8wJ8B5fu0UDoLwHAhUo1QA==", + "version": "7.21.8", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.21.8.tgz", + "integrity": "sha512-YeM22Sondbo523Sz0+CirSPnbj9bG3P0CdHcBZdqUuaeOaYEFbOLoGU7lebvGP6P5J/WE9wOn7u7C4J9HvS1xQ==", "dev": true, "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.21.4", - "@babel/generator": "^7.21.4", - "@babel/helper-compilation-targets": "^7.21.4", - "@babel/helper-module-transforms": "^7.21.2", - "@babel/helpers": "^7.21.0", - "@babel/parser": "^7.21.4", + "@babel/generator": "^7.21.5", + "@babel/helper-compilation-targets": "^7.21.5", + "@babel/helper-module-transforms": "^7.21.5", + "@babel/helpers": 
"^7.21.5", + "@babel/parser": "^7.21.8", "@babel/template": "^7.20.7", - "@babel/traverse": "^7.21.4", - "@babel/types": "^7.21.4", + "@babel/traverse": "^7.21.5", + "@babel/types": "^7.21.5", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -184,13 +183,12 @@ } }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.21.4", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.21.4.tgz", - "integrity": "sha512-Fa0tTuOXZ1iL8IeDFUWCzjZcn+sJGd9RZdH9esYVjEejGmzf+FFYQpMi/kZUk2kPy/q1H3/GPw7np8qar/stfg==", + "version": "7.21.5", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.21.5.tgz", + "integrity": "sha512-1RkbFGUKex4lvsB9yhIfWltJM5cZKUftB2eNajaDv3dCMEp49iBG0K14uH8NnX9IPux2+mK7JGEOB0jn48/J6w==", "dev": true, - "license": "MIT", "dependencies": { - "@babel/compat-data": "^7.21.4", + "@babel/compat-data": "^7.21.5", "@babel/helper-validator-option": "^7.21.0", "browserslist": "^4.21.3", "lru-cache": "^5.1.1", @@ -498,15 +496,14 @@ } }, "node_modules/@babel/helpers": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.21.0.tgz", - "integrity": "sha512-XXve0CBtOW0pd7MRzzmoyuSj0e3SEzj8pgyFxnTT1NJZL38BD1MK7yYrm8yefRPIDvNNe14xR4FdbHwpInD4rA==", + "version": "7.21.5", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.21.5.tgz", + "integrity": "sha512-BSY+JSlHxOmGsPTydUkPf1MdMQ3M81x5xGCOVgWM3G8XH77sJ292Y2oqcp0CbbgxhqBuI46iUz1tT7hqP7EfgA==", "dev": true, - "license": "MIT", "dependencies": { "@babel/template": "^7.20.7", - "@babel/traverse": "^7.21.0", - "@babel/types": "^7.21.0" + "@babel/traverse": "^7.21.5", + "@babel/types": "^7.21.5" }, "engines": { "node": ">=6.9.0" @@ -999,6 +996,18 @@ "@babel/core": "^7.0.0-0" } }, + "node_modules/@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, "node_modules/@babel/plugin-syntax-json-strings": { "version": "7.8.3", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", @@ -1153,13 +1162,12 @@ } }, "node_modules/@babel/plugin-transform-arrow-functions": { - "version": "7.20.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.20.7.tgz", - "integrity": "sha512-3poA5E7dzDomxj9WXWwuD6A5F3kc7VXwIJO+E+J8qtDtS+pXPAhrgEyh+9GBwBgPq1Z+bB+/JD60lp5jsN7JPQ==", + "version": "7.21.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.21.5.tgz", + "integrity": "sha512-wb1mhwGOCaXHDTcsRYMKF9e5bbMgqwxtqa2Y1ifH96dXJPwbuLX9qHy3clhrxVqgMz7nyNXs8VkxdH8UBcjKqA==", "dev": true, - "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.20.2" + "@babel/helper-plugin-utils": "^7.21.5" }, "engines": { "node": ">=6.9.0" @@ -1253,13 +1261,12 @@ } }, "node_modules/@babel/plugin-transform-computed-properties": { - "version": "7.20.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.20.7.tgz", - "integrity": "sha512-Lz7MvBK6DTjElHAmfu6bfANzKcxpyNPeYBGEafyA6E5HtRpjpZwU+u7Qrgz/2OR0z+5TvKYbPdphfSaAcZBrYQ==", + "version": "7.21.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.21.5.tgz", + "integrity": "sha512-TR653Ki3pAwxBxUe8srfF3e4Pe3FTA46uaNHYyQwIoM4oWKSoOZiDNyHJ0oIoDIUPSRQbQG7jzgVBX3FPVne1Q==", "dev": true, - 
"license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.20.2", + "@babel/helper-plugin-utils": "^7.21.5", "@babel/template": "^7.20.7" }, "engines": { @@ -1336,13 +1343,12 @@ } }, "node_modules/@babel/plugin-transform-for-of": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.21.0.tgz", - "integrity": "sha512-LlUYlydgDkKpIY7mcBWvyPPmMcOphEyYA27Ef4xpbh1IiDNLr0kZsos2nf92vz3IccvJI25QUwp86Eo5s6HmBQ==", + "version": "7.21.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.21.5.tgz", + "integrity": "sha512-nYWpjKW/7j/I/mZkGVgHJXh4bA1sfdFnJoOXwJuj4m3Q2EraO/8ZyrkCau9P5tbHQk01RMSt6KYLCsW7730SXQ==", "dev": true, - "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.20.2" + "@babel/helper-plugin-utils": "^7.21.5" }, "engines": { "node": ">=6.9.0" @@ -1554,13 +1560,12 @@ } }, "node_modules/@babel/plugin-transform-regenerator": { - "version": "7.20.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.20.5.tgz", - "integrity": "sha512-kW/oO7HPBtntbsahzQ0qSE3tFvkFwnbozz3NWFhLGqH75vLEg+sCGngLlhVkePlCs3Jv0dBBHDzCHxNiFAQKCQ==", + "version": "7.21.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.21.5.tgz", + "integrity": "sha512-ZoYBKDb6LyMi5yCsByQ5jmXsHAQDDYeexT1Szvlmui+lADvfSecr5Dxd/PkrTC3pAD182Fcju1VQkB4oCp9M+w==", "dev": true, - "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.20.2", + "@babel/helper-plugin-utils": "^7.21.5", "regenerator-transform": "^0.15.1" }, "engines": { @@ -1686,13 +1691,12 @@ } }, "node_modules/@babel/plugin-transform-unicode-escapes": { - "version": "7.18.10", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.18.10.tgz", - "integrity": 
"sha512-kKAdAI+YzPgGY/ftStBFXTI1LZFju38rYThnfMykS+IXy8BVx+res7s2fxf1l8I35DV2T97ezo6+SGrXz6B3iQ==", + "version": "7.21.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.21.5.tgz", + "integrity": "sha512-LYm/gTOwZqsYohlvFUe/8Tujz75LqqVC2w+2qPHLR+WyWHGCZPN1KBpJCJn+4Bk4gOkQy/IXKIge6az5MqwlOg==", "dev": true, - "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.18.9" + "@babel/helper-plugin-utils": "^7.21.5" }, "engines": { "node": ">=6.9.0" @@ -1719,15 +1723,14 @@ } }, "node_modules/@babel/preset-env": { - "version": "7.21.4", - "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.21.4.tgz", - "integrity": "sha512-2W57zHs2yDLm6GD5ZpvNn71lZ0B/iypSdIeq25OurDKji6AdzV07qp4s3n1/x5BqtiGaTrPN3nerlSCaC5qNTw==", + "version": "7.21.5", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.21.5.tgz", + "integrity": "sha512-wH00QnTTldTbf/IefEVyChtRdw5RJvODT/Vb4Vcxq1AZvtXj6T0YeX0cAcXhI6/BdGuiP3GcNIL4OQbI2DVNxg==", "dev": true, - "license": "MIT", "dependencies": { - "@babel/compat-data": "^7.21.4", - "@babel/helper-compilation-targets": "^7.21.4", - "@babel/helper-plugin-utils": "^7.20.2", + "@babel/compat-data": "^7.21.5", + "@babel/helper-compilation-targets": "^7.21.5", + "@babel/helper-plugin-utils": "^7.21.5", "@babel/helper-validator-option": "^7.21.0", "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.18.6", "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.20.7", @@ -1752,6 +1755,7 @@ "@babel/plugin-syntax-dynamic-import": "^7.8.3", "@babel/plugin-syntax-export-namespace-from": "^7.8.3", "@babel/plugin-syntax-import-assertions": "^7.20.0", + "@babel/plugin-syntax-import-meta": "^7.10.4", "@babel/plugin-syntax-json-strings": "^7.8.3", "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", @@ -1761,22 +1765,22 @@ 
"@babel/plugin-syntax-optional-chaining": "^7.8.3", "@babel/plugin-syntax-private-property-in-object": "^7.14.5", "@babel/plugin-syntax-top-level-await": "^7.14.5", - "@babel/plugin-transform-arrow-functions": "^7.20.7", + "@babel/plugin-transform-arrow-functions": "^7.21.5", "@babel/plugin-transform-async-to-generator": "^7.20.7", "@babel/plugin-transform-block-scoped-functions": "^7.18.6", "@babel/plugin-transform-block-scoping": "^7.21.0", "@babel/plugin-transform-classes": "^7.21.0", - "@babel/plugin-transform-computed-properties": "^7.20.7", + "@babel/plugin-transform-computed-properties": "^7.21.5", "@babel/plugin-transform-destructuring": "^7.21.3", "@babel/plugin-transform-dotall-regex": "^7.18.6", "@babel/plugin-transform-duplicate-keys": "^7.18.9", "@babel/plugin-transform-exponentiation-operator": "^7.18.6", - "@babel/plugin-transform-for-of": "^7.21.0", + "@babel/plugin-transform-for-of": "^7.21.5", "@babel/plugin-transform-function-name": "^7.18.9", "@babel/plugin-transform-literals": "^7.18.9", "@babel/plugin-transform-member-expression-literals": "^7.18.6", "@babel/plugin-transform-modules-amd": "^7.20.11", - "@babel/plugin-transform-modules-commonjs": "^7.21.2", + "@babel/plugin-transform-modules-commonjs": "^7.21.5", "@babel/plugin-transform-modules-systemjs": "^7.20.11", "@babel/plugin-transform-modules-umd": "^7.18.6", "@babel/plugin-transform-named-capturing-groups-regex": "^7.20.5", @@ -1784,17 +1788,17 @@ "@babel/plugin-transform-object-super": "^7.18.6", "@babel/plugin-transform-parameters": "^7.21.3", "@babel/plugin-transform-property-literals": "^7.18.6", - "@babel/plugin-transform-regenerator": "^7.20.5", + "@babel/plugin-transform-regenerator": "^7.21.5", "@babel/plugin-transform-reserved-words": "^7.18.6", "@babel/plugin-transform-shorthand-properties": "^7.18.6", "@babel/plugin-transform-spread": "^7.20.7", "@babel/plugin-transform-sticky-regex": "^7.18.6", "@babel/plugin-transform-template-literals": "^7.18.9", 
"@babel/plugin-transform-typeof-symbol": "^7.18.9", - "@babel/plugin-transform-unicode-escapes": "^7.18.10", + "@babel/plugin-transform-unicode-escapes": "^7.21.5", "@babel/plugin-transform-unicode-regex": "^7.18.6", "@babel/preset-modules": "^0.1.5", - "@babel/types": "^7.21.4", + "@babel/types": "^7.21.5", "babel-plugin-polyfill-corejs2": "^0.3.3", "babel-plugin-polyfill-corejs3": "^0.6.0", "babel-plugin-polyfill-regenerator": "^0.4.1", @@ -1826,15 +1830,15 @@ } }, "node_modules/@babel/preset-typescript": { - "version": "7.21.4", - "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.21.4.tgz", - "integrity": "sha512-sMLNWY37TCdRH/bJ6ZeeOH1nPuanED7Ai9Y/vH31IPqalioJ6ZNFUWONsakhv4r4n+I6gm5lmoE0olkgib/j/A==", + "version": "7.21.5", + "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.21.5.tgz", + "integrity": "sha512-iqe3sETat5EOrORXiQ6rWfoOg2y68Cs75B9wNxdPW4kixJxh7aXQE1KPdWLDniC24T/6dSnguF33W9j/ZZQcmA==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.20.2", + "@babel/helper-plugin-utils": "^7.21.5", "@babel/helper-validator-option": "^7.21.0", "@babel/plugin-syntax-jsx": "^7.21.4", - "@babel/plugin-transform-modules-commonjs": "^7.21.2", + "@babel/plugin-transform-modules-commonjs": "^7.21.5", "@babel/plugin-transform-typescript": "^7.21.3" }, "engines": { @@ -1871,11 +1875,10 @@ "license": "MIT" }, "node_modules/@babel/runtime": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.21.0.tgz", - "integrity": "sha512-xwII0//EObnq89Ji5AKYQaRYiW/nZ3llSv29d49IuxPhKbtJoLP+9QUUZ4nVragQVtaVGeZrpB+ZtG/Pdy/POw==", + "version": "7.21.5", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.21.5.tgz", + "integrity": "sha512-8jI69toZqqcsnqGGqwGS4Qb1VwLOEp4hz+CXPywcvjs60u3B4Pom/U/7rm4W8tMOYEB+E9wgD0mW1l3r8qlI9Q==", "dev": true, - "license": "MIT", "dependencies": { "regenerator-runtime": "^0.13.11" }, @@ -1970,15 +1973,14 @@ } }, 
"node_modules/@eslint/eslintrc": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.0.2.tgz", - "integrity": "sha512-3W4f5tDUra+pA+FzgugqL2pRimUTDJWKr7BINqOpkZrC0uYI0NIc0/JFgBROCU07HR6GieA5m3/rsPIhDmCXTQ==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.0.3.tgz", + "integrity": "sha512-+5gy6OQfk+xx3q0d6jGZZC3f3KzAkXc/IanVxd1is/VIIziRqqt3ongQz0FiTUXqTk0c7aDB3OaFuKnuSoJicQ==", "dev": true, - "license": "MIT", "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", - "espree": "^9.5.1", + "espree": "^9.5.2", "globals": "^13.19.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", @@ -1994,11 +1996,10 @@ } }, "node_modules/@eslint/js": { - "version": "8.38.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.38.0.tgz", - "integrity": "sha512-IoD2MfUnOV58ghIHCiil01PcohxjbYR/qCxsoC+xNgUwh1EY8jOOrYmu3d3a71+tJJ23uscEV4X2HJWMsPJu4g==", + "version": "8.40.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.40.0.tgz", + "integrity": "sha512-ElyB54bJIhXQYVKjDSvCkPO1iU1tSAeVQJbllWJq1XQSmmA4dgFk8CbiBGpiOPxleE48vDogxCtmMYku4HSVLA==", "dev": true, - "license": "MIT", "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } @@ -2163,17 +2164,16 @@ "dev": true }, "node_modules/@types/async": { - "version": "3.2.18", - "resolved": "https://registry.npmjs.org/@types/async/-/async-3.2.18.tgz", - "integrity": "sha512-/IsuXp3B9R//uRLi40VlIYoMp7OzhkunPe2fDu7jGfQXI9y3CDCx6FC4juRLSqrpmLst3vgsiK536AAGJFl4Ww==", + "version": "3.2.20", + "resolved": "https://registry.npmjs.org/@types/async/-/async-3.2.20.tgz", + "integrity": "sha512-6jSBQQugzyX1aWto0CbvOnmxrU9tMoXfA9gc4IrLEtvr3dTwSg5GLGoWiZnGLI6UG/kqpB3JOQKQrqnhUWGKQA==", "dev": true }, "node_modules/@types/json-schema": { "version": "7.0.11", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", "integrity": 
"sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==", - "dev": true, - "license": "MIT" + "dev": true }, "node_modules/@types/json5": { "version": "0.0.29", @@ -2182,11 +2182,10 @@ "dev": true }, "node_modules/@types/lodash": { - "version": "4.14.192", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.192.tgz", - "integrity": "sha512-km+Vyn3BYm5ytMO13k9KTp27O75rbQ0NFw+U//g+PX7VZyjCioXaRFisqSIJRECljcTv73G3i6BpglNGHgUQ5A==", - "dev": true, - "license": "MIT" + "version": "4.14.194", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.194.tgz", + "integrity": "sha512-r22s9tAS7imvBt2lyHC9B8AGwWnXaYb1tY09oyLkXDs4vArpYJzw09nj8MLx5VfciBPGIb+ZwG0ssYnEPJxn/g==", + "dev": true }, "node_modules/@types/mime-types": { "version": "2.1.1", @@ -2195,11 +2194,10 @@ "dev": true }, "node_modules/@types/node": { - "version": "18.15.11", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz", - "integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q==", - "dev": true, - "license": "MIT" + "version": "20.1.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.1.0.tgz", + "integrity": "sha512-O+z53uwx64xY7D6roOi4+jApDGFg0qn6WHcxe5QeqjMaTezBO/mxdfFXIVAVVyNWKx84OmPB3L8kbVYOTeN34A==", + "dev": true }, "node_modules/@types/normalize-package-data": { "version": "2.4.1", @@ -2211,8 +2209,7 @@ "version": "7.3.13", "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.13.tgz", "integrity": "sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==", - "dev": true, - "license": "MIT" + "dev": true }, "node_modules/@types/xml": { "version": "1.0.8", @@ -2233,16 +2230,15 @@ } }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "5.57.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.57.1.tgz", - "integrity": 
"sha512-1MeobQkQ9tztuleT3v72XmY0XuKXVXusAhryoLuU5YZ+mXoYKZP9SQ7Flulh1NX4DTjpGTc2b/eMu4u7M7dhnQ==", + "version": "5.59.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.59.2.tgz", + "integrity": "sha512-yVrXupeHjRxLDcPKL10sGQ/QlVrA8J5IYOEWVqk0lJaSZP7X5DfnP7Ns3cc74/blmbipQ1htFNVGsHX6wsYm0A==", "dev": true, - "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.4.0", - "@typescript-eslint/scope-manager": "5.57.1", - "@typescript-eslint/type-utils": "5.57.1", - "@typescript-eslint/utils": "5.57.1", + "@typescript-eslint/scope-manager": "5.59.2", + "@typescript-eslint/type-utils": "5.59.2", + "@typescript-eslint/utils": "5.59.2", "debug": "^4.3.4", "grapheme-splitter": "^1.0.4", "ignore": "^5.2.0", @@ -2304,15 +2300,14 @@ "license": "ISC" }, "node_modules/@typescript-eslint/parser": { - "version": "5.57.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.57.1.tgz", - "integrity": "sha512-hlA0BLeVSA/wBPKdPGxoVr9Pp6GutGoY380FEhbVi0Ph4WNe8kLvqIRx76RSQt1lynZKfrXKs0/XeEk4zZycuA==", + "version": "5.59.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.59.2.tgz", + "integrity": "sha512-uq0sKyw6ao1iFOZZGk9F8Nro/8+gfB5ezl1cA06SrqbgJAt0SRoFhb9pXaHvkrxUpZaoLxt8KlovHNk8Gp6/HQ==", "dev": true, - "license": "BSD-2-Clause", "dependencies": { - "@typescript-eslint/scope-manager": "5.57.1", - "@typescript-eslint/types": "5.57.1", - "@typescript-eslint/typescript-estree": "5.57.1", + "@typescript-eslint/scope-manager": "5.59.2", + "@typescript-eslint/types": "5.59.2", + "@typescript-eslint/typescript-estree": "5.59.2", "debug": "^4.3.4" }, "engines": { @@ -2332,14 +2327,13 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "5.57.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.57.1.tgz", - "integrity": 
"sha512-N/RrBwEUKMIYxSKl0oDK5sFVHd6VI7p9K5MyUlVYAY6dyNb/wHUqndkTd3XhpGlXgnQsBkRZuu4f9kAHghvgPw==", + "version": "5.59.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.59.2.tgz", + "integrity": "sha512-dB1v7ROySwQWKqQ8rEWcdbTsFjh2G0vn8KUyvTXdPoyzSL6lLGkiXEV5CvpJsEe9xIdKV+8Zqb7wif2issoOFA==", "dev": true, - "license": "MIT", "dependencies": { - "@typescript-eslint/types": "5.57.1", - "@typescript-eslint/visitor-keys": "5.57.1" + "@typescript-eslint/types": "5.59.2", + "@typescript-eslint/visitor-keys": "5.59.2" }, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -2350,14 +2344,13 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "5.57.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.57.1.tgz", - "integrity": "sha512-/RIPQyx60Pt6ga86hKXesXkJ2WOS4UemFrmmq/7eOyiYjYv/MUSHPlkhU6k9T9W1ytnTJueqASW+wOmW4KrViw==", + "version": "5.59.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.59.2.tgz", + "integrity": "sha512-b1LS2phBOsEy/T381bxkkywfQXkV1dWda/z0PhnIy3bC5+rQWQDS7fk9CSpcXBccPY27Z6vBEuaPBCKCgYezyQ==", "dev": true, - "license": "MIT", "dependencies": { - "@typescript-eslint/typescript-estree": "5.57.1", - "@typescript-eslint/utils": "5.57.1", + "@typescript-eslint/typescript-estree": "5.59.2", + "@typescript-eslint/utils": "5.59.2", "debug": "^4.3.4", "tsutils": "^3.21.0" }, @@ -2378,11 +2371,10 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "5.57.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.57.1.tgz", - "integrity": "sha512-bSs4LOgyV3bJ08F5RDqO2KXqg3WAdwHCu06zOqcQ6vqbTJizyBhuh1o1ImC69X4bV2g1OJxbH71PJqiO7Y1RuA==", + "version": "5.59.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.59.2.tgz", + "integrity": "sha512-LbJ/HqoVs2XTGq5shkiKaNTuVv5tTejdHgfdjqRUGdYhjW1crm/M7og2jhVskMt8/4wS3T1+PfFvL1K3wqYj4w==", "dev": true, - "license": 
"MIT", "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, @@ -2392,14 +2384,13 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "5.57.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.57.1.tgz", - "integrity": "sha512-A2MZqD8gNT0qHKbk2wRspg7cHbCDCk2tcqt6ScCFLr5Ru8cn+TCfM786DjPhqwseiS+PrYwcXht5ztpEQ6TFTw==", + "version": "5.59.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.59.2.tgz", + "integrity": "sha512-+j4SmbwVmZsQ9jEyBMgpuBD0rKwi9RxRpjX71Brr73RsYnEr3Lt5QZ624Bxphp8HUkSKfqGnPJp1kA5nl0Sh7Q==", "dev": true, - "license": "BSD-2-Clause", "dependencies": { - "@typescript-eslint/types": "5.57.1", - "@typescript-eslint/visitor-keys": "5.57.1", + "@typescript-eslint/types": "5.59.2", + "@typescript-eslint/visitor-keys": "5.59.2", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", @@ -2424,7 +2415,6 @@ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "dev": true, - "license": "ISC", "dependencies": { "yallist": "^4.0.0" }, @@ -2433,11 +2423,10 @@ } }, "node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { - "version": "7.3.8", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", - "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.0.tgz", + "integrity": "sha512-+XC0AD/R7Q2mPSRuy2Id0+CGTZ98+8f+KvwirxOKIEyid+XSx6HbC63p+O4IndTHuX5Z+JxQ0TghCkO5Cg/2HA==", "dev": true, - "license": "ISC", "dependencies": { "lru-cache": "^6.0.0" }, @@ -2452,22 +2441,20 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true, - "license": "ISC" + "dev": true }, "node_modules/@typescript-eslint/utils": { - "version": "5.57.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.57.1.tgz", - "integrity": "sha512-kN6vzzf9NkEtawECqze6v99LtmDiUJCVpvieTFA1uL7/jDghiJGubGZ5csicYHU1Xoqb3oH/R5cN5df6W41Nfg==", + "version": "5.59.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.59.2.tgz", + "integrity": "sha512-kSuF6/77TZzyGPhGO4uVp+f0SBoYxCDf+lW3GKhtKru/L8k/Hd7NFQxyWUeY7Z/KGB2C6Fe3yf2vVi4V9TsCSQ==", "dev": true, - "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@types/json-schema": "^7.0.9", "@types/semver": "^7.3.12", - "@typescript-eslint/scope-manager": "5.57.1", - "@typescript-eslint/types": "5.57.1", - "@typescript-eslint/typescript-estree": "5.57.1", + "@typescript-eslint/scope-manager": "5.59.2", + "@typescript-eslint/types": "5.59.2", + "@typescript-eslint/typescript-estree": "5.59.2", "eslint-scope": "^5.1.1", "semver": "^7.3.7" }, @@ -2482,36 +2469,11 @@ "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, - "node_modules/@typescript-eslint/utils/node_modules/eslint-scope": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", - "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^4.1.1" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/@typescript-eslint/utils/node_modules/estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": 
">=4.0" - } - }, "node_modules/@typescript-eslint/utils/node_modules/lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "dev": true, - "license": "ISC", "dependencies": { "yallist": "^4.0.0" }, @@ -2520,11 +2482,10 @@ } }, "node_modules/@typescript-eslint/utils/node_modules/semver": { - "version": "7.3.8", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", - "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.0.tgz", + "integrity": "sha512-+XC0AD/R7Q2mPSRuy2Id0+CGTZ98+8f+KvwirxOKIEyid+XSx6HbC63p+O4IndTHuX5Z+JxQ0TghCkO5Cg/2HA==", "dev": true, - "license": "ISC", "dependencies": { "lru-cache": "^6.0.0" }, @@ -2539,17 +2500,15 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true, - "license": "ISC" + "dev": true }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "5.57.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.57.1.tgz", - "integrity": "sha512-RjQrAniDU0CEk5r7iphkm731zKlFiUjvcBS2yHAg8WWqFMCaCrD0rKEVOMUyMMcbGPZ0bPp56srkGWrgfZqLRA==", + "version": "5.59.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.59.2.tgz", + "integrity": "sha512-EEpsO8m3RASrKAHI9jpavNv9NlEUebV4qmF1OWxSTtKSFBpC1NCmWazDQHFivRf0O1DV11BA645yrLEVQ0/Lig==", "dev": true, - "license": "MIT", "dependencies": { - "@typescript-eslint/types": "5.57.1", + "@typescript-eslint/types": "5.59.2", "eslint-visitor-keys": "^3.3.0" }, "engines": { @@ -2568,9 +2527,9 @@ "license": "ISC" }, 
"node_modules/@upleveled/babel-plugin-remove-node-prefix": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@upleveled/babel-plugin-remove-node-prefix/-/babel-plugin-remove-node-prefix-1.0.4.tgz", - "integrity": "sha512-EBiMQNjGgDWhe/BcDRbb1R4q4SqS9bMH+NDFZMVMk1XrEHUr4Q5kMKZYDtj79y5QSASYCMQ29dLk9SvCv6haVQ==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@upleveled/babel-plugin-remove-node-prefix/-/babel-plugin-remove-node-prefix-1.0.5.tgz", + "integrity": "sha512-fBej/v/GHClDJ3H6vgUQOFeH+4dFUrcFJbu9mfJFratnzEBebrgYxPBXv3ssaArTt9HhvgsTVqemeswTum6b2Q==", "dev": true, "peerDependencies": { "@babel/core": "^7.0.0-0" @@ -2588,7 +2547,6 @@ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz", "integrity": "sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==", "dev": true, - "license": "MIT", "bin": { "acorn": "bin/acorn" }, @@ -2601,7 +2559,6 @@ "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", "dev": true, - "license": "MIT", "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } @@ -2624,7 +2581,6 @@ "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "dev": true, - "license": "MIT", "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -2733,7 +2689,6 @@ "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } @@ -2774,6 +2729,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/asap": { + "version": "2.0.6", + "resolved": 
"https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==", + "dev": true + }, "node_modules/assertion-error": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", @@ -3021,7 +2982,6 @@ "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", "dev": true, - "license": "MIT", "engines": { "node": ">=6" } @@ -3469,6 +3429,16 @@ "node": ">=0.4.0" } }, + "node_modules/dezalgo": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", + "integrity": "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==", + "dev": true, + "dependencies": { + "asap": "^2.0.0", + "wrappy": "1" + } + }, "node_modules/diff": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", @@ -3668,16 +3638,15 @@ } }, "node_modules/eslint": { - "version": "8.38.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.38.0.tgz", - "integrity": "sha512-pIdsD2jwlUGf/U38Jv97t8lq6HpaU/G9NKbYmpWpZGw3LdTNhZLbJePqxOXGB5+JEKfOPU/XLxYxFh03nr1KTg==", + "version": "8.40.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.40.0.tgz", + "integrity": "sha512-bvR+TsP9EHL3TqNtj9sCNJVAFK3fBN8Q7g5waghxyRsPLIMwL73XSKnZFK0hk/O2ANC+iAoq6PWMQ+IfBAJIiQ==", "dev": true, - "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.4.0", - "@eslint/eslintrc": "^2.0.2", - "@eslint/js": "8.38.0", + "@eslint/eslintrc": "^2.0.3", + "@eslint/js": "8.40.0", "@humanwhocodes/config-array": "^0.11.8", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", @@ -3687,9 +3656,9 @@ "debug": "^4.3.2", "doctrine": "^3.0.0", "escape-string-regexp": "^4.0.0", - 
"eslint-scope": "^7.1.1", - "eslint-visitor-keys": "^3.4.0", - "espree": "^9.5.1", + "eslint-scope": "^7.2.0", + "eslint-visitor-keys": "^3.4.1", + "espree": "^9.5.2", "esquery": "^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", @@ -3901,36 +3870,36 @@ } }, "node_modules/eslint-plugin-unicorn": { - "version": "46.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-46.0.0.tgz", - "integrity": "sha512-j07WkC+PFZwk8J33LYp6JMoHa1lXc1u6R45pbSAipjpfpb7KIGr17VE2D685zCxR5VL4cjrl65kTJflziQWMDA==", + "version": "47.0.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-47.0.0.tgz", + "integrity": "sha512-ivB3bKk7fDIeWOUmmMm9o3Ax9zbMz1Bsza/R2qm46ufw4T6VBFBaJIR1uN3pCKSmSXm8/9Nri8V+iUut1NhQGA==", "dev": true, "dependencies": { "@babel/helper-validator-identifier": "^7.19.1", - "@eslint-community/eslint-utils": "^4.1.2", - "ci-info": "^3.6.1", + "@eslint-community/eslint-utils": "^4.4.0", + "ci-info": "^3.8.0", "clean-regexp": "^1.0.0", - "esquery": "^1.4.0", + "esquery": "^1.5.0", "indent-string": "^4.0.0", - "is-builtin-module": "^3.2.0", + "is-builtin-module": "^3.2.1", "jsesc": "^3.0.2", "lodash": "^4.17.21", "pluralize": "^8.0.0", "read-pkg-up": "^7.0.1", "regexp-tree": "^0.1.24", - "regjsparser": "^0.9.1", + "regjsparser": "^0.10.0", "safe-regex": "^2.1.1", "semver": "^7.3.8", "strip-indent": "^3.0.0" }, "engines": { - "node": ">=14.18" + "node": ">=16" }, "funding": { "url": "https://github.com/sindresorhus/eslint-plugin-unicorn?sponsor=1" }, "peerDependencies": { - "eslint": ">=8.28.0" + "eslint": ">=8.38.0" } }, "node_modules/eslint-plugin-unicorn/node_modules/jsesc": { @@ -3957,6 +3926,27 @@ "node": ">=10" } }, + "node_modules/eslint-plugin-unicorn/node_modules/regjsparser": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.10.0.tgz", + "integrity": 
"sha512-qx+xQGZVsy55CH0a1hiVwHmqjLryfh7wQyF5HO07XJ9f7dQMY/gPQHhlyDkIzJKC+x2fUCpCcUODUUUFrm7SHA==", + "dev": true, + "dependencies": { + "jsesc": "~0.5.0" + }, + "bin": { + "regjsparser": "bin/parser" + } + }, + "node_modules/eslint-plugin-unicorn/node_modules/regjsparser/node_modules/jsesc": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", + "integrity": "sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==", + "dev": true, + "bin": { + "jsesc": "bin/jsesc" + } + }, "node_modules/eslint-plugin-unicorn/node_modules/semver": { "version": "7.4.0", "resolved": "https://registry.npmjs.org/semver/-/semver-7.4.0.tgz", @@ -4009,25 +3999,48 @@ } }, "node_modules/eslint-scope": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz", - "integrity": "sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", "dev": true, - "license": "BSD-2-Clause", "dependencies": { "esrecurse": "^4.3.0", - "estraverse": "^5.2.0" + "estraverse": "^4.1.1" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": ">=8.0.0" + } + }, + "node_modules/eslint-scope/node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true, + "engines": { + "node": ">=4.0" } }, "node_modules/eslint-visitor-keys": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.0.tgz", - "integrity": 
"sha512-HPpKPUBQcAsZOsHAFwTtIKcYlCje62XB7SEAcxjtmW6TD1WVpkS6i6/hOVtTZIl4zGj/mBqpFVGvaDneik+VoQ==", + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.1.tgz", + "integrity": "sha512-pZnmmLwYzf+kWaM/Qgrvpen51upAktaaiI01nsJD/Yr3lMOdNtq0cxkrrg16w64VtisN6okbs7Q8AfGqj4c9fA==", "dev": true, - "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/eslint-scope": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.0.tgz", + "integrity": "sha512-DYj5deGlHBfMt15J7rdtyKNq/Nqlv5KfU4iodrQ019XESsRnwXH9KAE0y3cwtUHDo2ob7CypAnCqefh6vioWRw==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, @@ -4049,15 +4062,14 @@ } }, "node_modules/espree": { - "version": "9.5.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-9.5.1.tgz", - "integrity": "sha512-5yxtHSZXRSW5pvv3hAlXM5+/Oswi1AUFqBmbibKb5s6bp3rGIDkyXU6xCoyuuLhijr4SFwPrXRoZjz0AZDN9tg==", + "version": "9.5.2", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.5.2.tgz", + "integrity": "sha512-7OASN1Wma5fum5SrNhFMAMJxOUAbhyfQ8dQ//PJaJbNw0URTPWqIghHWt1MmAANKhHZIYOHruW4Kw4ruUWOdGw==", "dev": true, - "license": "BSD-2-Clause", "dependencies": { "acorn": "^8.8.0", "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^3.4.0" + "eslint-visitor-keys": "^3.4.1" }, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -4084,7 +4096,6 @@ "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", "dev": true, - "license": "BSD-2-Clause", "dependencies": { "estraverse": "^5.2.0" }, @@ -4139,8 +4150,7 @@ "version": "3.1.3", "resolved": 
"https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true, - "license": "MIT" + "dev": true }, "node_modules/fast-glob": { "version": "3.2.12", @@ -4163,8 +4173,7 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true, - "license": "MIT" + "dev": true }, "node_modules/fast-safe-stringify": { "version": "2.1.1", @@ -4174,19 +4183,24 @@ "license": "MIT" }, "node_modules/fast-xml-parser": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.0.tgz", - "integrity": "sha512-+zVQv4aVTO+o8oRUyRL7PjgeVo1J6oP8Cw2+a8UTZQcj5V0yUK5T63gTN0ldgiHDPghUjKc4OpT6SwMTwnOQug==", - "license": "MIT", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.2.tgz", + "integrity": "sha512-DLzIPtQqmvmdq3VUKR7T6omPK/VCRNqgFlGtbESfyhcH2R4I8EzK1/K6E8PkRCK2EabWrUHK32NjYRbEFnnz0Q==", + "funding": [ + { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + }, + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], "dependencies": { "strnum": "^1.0.5" }, "bin": { "fxparser": "src/cli/cli.js" - }, - "funding": { - "type": "paypal", - "url": "https://paypal.me/naturalintelligence" } }, "node_modules/fastq": { @@ -4321,9 +4335,9 @@ } }, "node_modules/form-data": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", - "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": 
"sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", "dev": true, "dependencies": { "asynckit": "^0.4.0", @@ -4335,11 +4349,16 @@ } }, "node_modules/formidable": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/formidable/-/formidable-1.2.6.tgz", - "integrity": "sha512-KcpbcpuLNOwrEjnbpMC0gS+X8ciDoZE1kkqzat4a8vrprf+s9pKNQ/QIwWfbfs4ltgmFl3MD177SNTkve3BwGQ==", - "deprecated": "Please upgrade to latest, formidable@v2 or formidable@v3! Check these notes: https://bit.ly/2ZEqIau", + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.1.2.tgz", + "integrity": "sha512-CM3GuJ57US06mlpQ47YcunuUZ9jpm8Vx+P2CGt2j7HpgkKZO/DJYQ0Bobim8G6PFQmK5lOqOOdUXboU+h73A4g==", "dev": true, + "dependencies": { + "dezalgo": "^1.0.4", + "hexoid": "^1.0.0", + "once": "^1.4.0", + "qs": "^6.11.0" + }, "funding": { "url": "https://ko-fi.com/tunnckoCore/commissions" } @@ -4517,7 +4536,6 @@ "resolved": "https://registry.npmjs.org/globals/-/globals-13.20.0.tgz", "integrity": "sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==", "dev": true, - "license": "MIT", "dependencies": { "type-fest": "^0.20.2" }, @@ -4554,7 +4572,6 @@ "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", "dev": true, - "license": "MIT", "dependencies": { "array-union": "^2.1.0", "dir-glob": "^3.0.1", @@ -4703,6 +4720,15 @@ "he": "bin/he" } }, + "node_modules/hexoid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/hexoid/-/hexoid-1.0.0.tgz", + "integrity": "sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/hosted-git-info": { "version": "2.8.9", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", 
@@ -4748,7 +4774,6 @@ "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", "dev": true, - "license": "MIT", "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" @@ -5271,8 +5296,7 @@ "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true, - "license": "MIT" + "dev": true }, "node_modules/json-stable-stringify-without-jsonify": { "version": "1.0.1", @@ -5346,9 +5370,9 @@ "dev": true }, "node_modules/lint-staged": { - "version": "13.2.1", - "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-13.2.1.tgz", - "integrity": "sha512-8gfzinVXoPfga5Dz/ZOn8I2GOhf81Wvs+KwbEXQn/oWZAvCVS2PivrXfVbFJc93zD16uC0neS47RXHIjXKYZQw==", + "version": "13.2.2", + "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-13.2.2.tgz", + "integrity": "sha512-71gSwXKy649VrSU09s10uAT0rWCcY3aewhMaHyl2N84oBk4Xs9HgxvUp3AYu+bNsK4NrOYYxvSgg7FyGJ+jGcA==", "dev": true, "dependencies": { "chalk": "5.2.0", @@ -5363,7 +5387,7 @@ "object-inspect": "^1.12.3", "pidtree": "^0.6.0", "string-argv": "^0.3.1", - "yaml": "^2.2.1" + "yaml": "^2.2.2" }, "bin": { "lint-staged": "bin/lint-staged.js" @@ -5937,11 +5961,10 @@ "license": "MIT" }, "node_modules/nock": { - "version": "13.3.0", - "resolved": "https://registry.npmjs.org/nock/-/nock-13.3.0.tgz", - "integrity": "sha512-HHqYQ6mBeiMc+N038w8LkMpDCRquCHWeNmN3v6645P3NhN2+qXOBqvPqo7Rt1VyCMzKhJ733wZqw5B7cQVFNPg==", + "version": "13.3.1", + "resolved": "https://registry.npmjs.org/nock/-/nock-13.3.1.tgz", + "integrity": "sha512-vHnopocZuI93p2ccivFyGuUfzjq2fxNyNurp7816mlT5V5HF4SzXu8lvLrVzBbNqzs+ODooZ6OksuSUNM7Njkw==", "dev": true, - "license": "MIT", "dependencies": { "debug": "^4.1.0", "json-stringify-safe": 
"^5.0.1", @@ -6199,7 +6222,6 @@ "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", "dev": true, - "license": "MIT", "dependencies": { "callsites": "^3.0.0" }, @@ -6425,11 +6447,10 @@ } }, "node_modules/prettier": { - "version": "2.8.7", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.7.tgz", - "integrity": "sha512-yPngTo3aXUUmyuTjeTUT75txrf+aMh9FiD7q9ZE/i6r0bPb22g4FsE6Y338PQX1bmfy08i9QQCB7/rcUAVntfw==", + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", + "integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==", "dev": true, - "license": "MIT", "bin": { "prettier": "bin-prettier.js" }, @@ -6450,6 +6471,15 @@ "node": ">= 8" } }, + "node_modules/punycode": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", + "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, "node_modules/qs": { "version": "6.11.1", "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.1.tgz", @@ -6666,15 +6696,13 @@ "version": "0.13.11", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz", "integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==", - "dev": true, - "license": "MIT" + "dev": true }, "node_modules/regenerator-transform": { "version": "0.15.1", "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.1.tgz", "integrity": "sha512-knzmNAcuyxV+gQCufkYcvOqX/qIIfHLv0u5x79kRxuGojfYVky1f15TzZEu2Avte8QGepvUNTnLskf8E6X6Vyg==", "dev": true, - "license": "MIT", "dependencies": { "@babel/runtime": "^7.8.4" } @@ -6778,7 +6806,6 @@ "resolved": 
"https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", "dev": true, - "license": "MIT", "engines": { "node": ">=4" } @@ -7004,7 +7031,6 @@ "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", "dev": true, - "license": "MIT", "engines": { "node": ">=8" } @@ -7300,26 +7326,24 @@ "license": "MIT" }, "node_modules/superagent": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/superagent/-/superagent-5.3.1.tgz", - "integrity": "sha512-wjJ/MoTid2/RuGCOFtlacyGNxN9QLMgcpYLDQlWFIhhdJ93kNscFonGvrpAHSCVjRVj++DGCglocF7Aej1KHvQ==", - "deprecated": "Please upgrade to v7.0.2+ of superagent. We have fixed numerous issues with streams, form-data, attach(), filesystem errors not bubbling up (ENOENT on attach()), and all tests are now passing. See the releases tab for more information at .", + "version": "8.0.9", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-8.0.9.tgz", + "integrity": "sha512-4C7Bh5pyHTvU33KpZgwrNKh/VQnvgtCSqPRfJAUdmrtSYePVzVg4E4OzsrbkhJj9O7SO6Bnv75K/F8XVZT8YHA==", "dev": true, "dependencies": { "component-emitter": "^1.3.0", - "cookiejar": "^2.1.2", - "debug": "^4.1.1", - "fast-safe-stringify": "^2.0.7", - "form-data": "^3.0.0", - "formidable": "^1.2.2", + "cookiejar": "^2.1.4", + "debug": "^4.3.4", + "fast-safe-stringify": "^2.1.1", + "form-data": "^4.0.0", + "formidable": "^2.1.2", "methods": "^1.1.2", - "mime": "^2.4.6", - "qs": "^6.9.4", - "readable-stream": "^3.6.0", - "semver": "^7.3.2" + "mime": "2.6.0", + "qs": "^6.11.0", + "semver": "^7.3.8" }, "engines": { - "node": ">= 7.0.0" + "node": ">=6.4.0 <13 || >=14" } }, "node_modules/superagent/node_modules/lru-cache": { @@ -7433,13 +7457,11 @@ "license": "MIT" }, "node_modules/through2": { - "version": "3.0.2", - "resolved": 
"https://registry.npmjs.org/through2/-/through2-3.0.2.tgz", - "integrity": "sha512-enaDQ4MUyP2W6ZyT6EsMzqBPZaM/avg8iuo+l2d3QCs0J+6RaqkHV/2/lOwDTueBHeJ/2LG9lrLW3d5rWPucuQ==", - "license": "MIT", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/through2/-/through2-4.0.2.tgz", + "integrity": "sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==", "dependencies": { - "inherits": "^2.0.4", - "readable-stream": "2 || 3" + "readable-stream": "3" } }, "node_modules/tiny-glob": { @@ -7560,7 +7582,6 @@ "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", "dev": true, - "license": "(MIT OR CC0-1.0)", "engines": { "node": ">=10" }, @@ -7687,21 +7708,10 @@ "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", "dev": true, - "license": "BSD-2-Clause", "dependencies": { "punycode": "^2.1.0" } }, - "node_modules/uri-js/node_modules/punycode": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", - "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/util": { "version": "0.12.5", "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", diff --git a/package.json b/package.json index 90f2bc60..0a386359 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,6 @@ "test": "mocha", "lint": "eslint --ext js,mjs,cjs,ts ./", "lint-fix": "eslint --ext js,mjs,cjs,ts ./ --fix", - "prepublish": "", "prepublishOnly": "npm test && npm run build", "functional": "mocha tests/functional/functional-tests.js", "format": "prettier -w .", @@ -85,58 +84,58 @@ }, "homepage": 
"https://github.com/minio/minio-js#readme", "dependencies": { - "async": "^3.1.0", - "block-stream2": "^2.0.0", + "async": "^3.2.4", + "block-stream2": "^2.1.0", "browser-or-node": "^2.1.1", "buffer-crc32": "^0.2.13", - "fast-xml-parser": "^4.1.3", + "fast-xml-parser": "^4.2.2", "ipaddr.js": "^2.0.1", "json-stream": "^1.0.0", "lodash": "^4.17.21", - "mime-types": "^2.1.14", - "mkdirp": "^0.5.1", - "query-string": "^7.1.1", - "through2": "^3.0.1", + "mime-types": "^2.1.35", + "mkdirp": "^0.5.6", + "query-string": "^7.1.3", + "through2": "^4.0.2", "web-encoding": "^1.1.5", - "xml": "^1.0.0", + "xml": "^1.0.1", "xml2js": "^0.5.0" }, "devDependencies": { - "@babel/core": "^7.12.10", + "@babel/core": "^7.21.8", "@babel/plugin-transform-modules-commonjs": "^7.21.5", - "@babel/preset-env": "^7.12.10", - "@babel/preset-typescript": "^7.21.4", + "@babel/preset-env": "^7.21.5", + "@babel/preset-typescript": "^7.21.5", "@babel/register": "^7.21.0", "@nodelib/fs.walk": "^1.2.8", - "@types/async": "^3.2.18", - "@types/lodash": "^4.14.192", + "@types/async": "^3.2.20", + "@types/lodash": "^4.14.194", "@types/mime-types": "^2.1.1", - "@types/node": "^18.15.11", + "@types/node": "^20.1.0", "@types/xml": "^1.0.8", "@types/xml2js": "^0.4.11", - "@typescript-eslint/eslint-plugin": "^5.57.1", - "@typescript-eslint/parser": "^5.57.1", - "@upleveled/babel-plugin-remove-node-prefix": "^1.0.4", + "@typescript-eslint/eslint-plugin": "^5.59.2", + "@typescript-eslint/parser": "^5.59.2", + "@upleveled/babel-plugin-remove-node-prefix": "^1.0.5", "babel-plugin-replace-import-extension": "^1.1.3", "babel-plugin-transform-replace-expressions": "^0.2.0", - "chai": "^4.2.0", + "chai": "^4.3.7", "dotenv": "^16.0.3", - "eslint": "^8.37.0", + "eslint": "^8.40.0", "eslint-config-prettier": "^8.8.0", "eslint-import-resolver-typescript": "^3.5.5", "eslint-plugin-import": "^2.27.5", "eslint-plugin-simple-import-sort": "^10.0.0", - "eslint-plugin-unicorn": "^46.0.0", + "eslint-plugin-unicorn": "^47.0.0", 
"eslint-plugin-unused-imports": "^2.0.0", "husky": "^8.0.3", - "lint-staged": "^13.2.1", - "mocha": "^9.2.0", - "mocha-steps": "^1.1.0", - "nock": "^13.2.2", - "prettier": "^2.8.7", - "source-map-support": "^0.5.13", - "split-file": "^2.2.2", - "superagent": "^5.1.0", + "lint-staged": "^13.2.2", + "mocha": "^9.2.2", + "mocha-steps": "^1.3.0", + "nock": "^13.3.1", + "prettier": "^2.8.8", + "source-map-support": "^0.5.21", + "split-file": "^2.3.0", + "superagent": "^8.0.1", "typescript": "^5.0.4", "uuid": "^9.0.0" }, From d2052b54955602a08bf2079aa65b562c5307d16f Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sat, 6 May 2023 22:54:07 +0800 Subject: [PATCH 51/78] ignore pnpm lock --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 5e2c97e7..abd45344 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,4 @@ node_modules yarn.lock .yarn/ .yarnrc.yml +pnpm-lock.yaml From 8195e790862e0d866ff86a0dd8210ceefb3a6404 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sat, 6 May 2023 23:37:07 +0800 Subject: [PATCH 52/78] speedup build --- build.mjs | 27 +++++++++++---------------- 1 file changed, 11 insertions(+), 16 deletions(-) diff --git a/build.mjs b/build.mjs index 71d43aca..54194b87 100644 --- a/build.mjs +++ b/build.mjs @@ -1,8 +1,9 @@ /* eslint-disable no-console */ -import { execSync } from 'node:child_process' +import { exec } from 'node:child_process' import * as fs from 'node:fs' import * as fsp from 'node:fs/promises' import * as path from 'node:path' +import { promisify } from 'node:util' import * as babel from '@babel/core' import * as fsWalk from '@nodelib/fs.walk' @@ -56,7 +57,7 @@ const extMap = { cjs: '.js', esm: '.mjs' } async function buildFiles({ files, module, outDir }) { console.log(`building for ${module}`) - execSync(`npx tsc --outDir ${outDir}`, { stdio: 'inherit' }) + await promisify(exec)(`npx tsc --outDir ${outDir}`, { stdio: 'inherit' }) const opt = options(module) for (const file of files) { @@ -71,17 +72,17 @@ 
async function buildFiles({ files, module, outDir }) { const distCodePath = outFilePath.replace(/\.[tj]s$/g, extMap[module]) if (file.path.endsWith('.d.ts')) { - fs.copyFileSync(file.path, outFilePath) + await fsp.copyFile(file.path, outFilePath) continue } try { - const result = await babel.transformAsync(fs.readFileSync(file.path).toString(), { + const result = await babel.transformAsync(await fsp.readFile(file.path, 'utf-8'), { filename: file.path, ...opt, }) - fs.writeFileSync(distCodePath, result.code) + await fsp.writeFile(distCodePath, result.code) } catch (e) { console.error(`failed to transpile ${file.path}`) throw e @@ -93,17 +94,11 @@ async function main() { await fsp.rm('dist', { recursive: true, force: true }) const entries = fsWalk.walkSync('src/') - await buildFiles({ - files: entries, - module: 'cjs', - outDir: './dist/main/', - }) - - await buildFiles({ - files: entries, - module: 'esm', - outDir: './dist/esm/', - }) + + await Promise.all([ + buildFiles({ files: entries, module: 'cjs', outDir: './dist/main/' }), + buildFiles({ files: entries, module: 'esm', outDir: './dist/esm/' }), + ]) for (const file of fsWalk.walkSync('dist/esm/')) { if (file.dirent.isDirectory()) { From 1712bd5aa4700f60811c75db0d154e5f71f41ac1 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sun, 7 May 2023 00:23:28 +0800 Subject: [PATCH 53/78] bucket and object js required --- src/helpers.ts | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/helpers.ts b/src/helpers.ts index 46cab2b9..330132c3 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -49,8 +49,8 @@ export class CopySourceOptions { * @param Encryption */ constructor({ - Bucket = '', - Object = '', + Bucket, + Object, VersionID = '', MatchETag = '', NoMatchETag = '', @@ -61,8 +61,8 @@ export class CopySourceOptions { End = 0, Encryption = undefined, }: { - Bucket?: string - Object?: string + Bucket: string + Object: string VersionID?: string MatchETag?: string NoMatchETag?: string @@ -72,7 
+72,7 @@ export class CopySourceOptions { Start?: number End?: number Encryption?: Encryption - } = {}) { + }) { this.Bucket = Bucket this.Object = Object this.VersionID = VersionID From aafe0fac3b2a2ec8386e42754dc16b3ce994fc4f Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sun, 7 May 2023 00:36:34 +0800 Subject: [PATCH 54/78] fix request header --- src/helpers.ts | 8 ++++---- src/internal/helper.ts | 6 ++---- src/internal/type.ts | 2 ++ 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/helpers.ts b/src/helpers.ts index 330132c3..d4659045 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -104,12 +104,12 @@ export class CopySourceOptions { return true } - getHeaders() { + getHeaders(): Header { const headerOptions: Header = {} headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) if (!isEmpty(this.VersionID)) { - headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) + '?versionId=' + this.VersionID + headerOptions['x-amz-copy-source'] = `${encodeURI(this.Bucket + '/' + this.Object)}?versionId=${this.VersionID}` } if (!isEmpty(this.MatchETag)) { @@ -201,9 +201,9 @@ export class CopyDestinationOptions { this.RetainUntilDate = RetainUntilDate } - getHeaders(): Record { + getHeaders(): Header { const replaceDirective = 'REPLACE' - const headerOptions: Record = {} + const headerOptions: Header = {} const userTags = this.UserTags if (!isEmpty(userTags)) { diff --git a/src/internal/helper.ts b/src/internal/helper.ts index ad234e1d..d645bd20 100644 --- a/src/internal/helper.ts +++ b/src/internal/helper.ts @@ -23,7 +23,7 @@ import ipaddr from 'ipaddr.js' import _ from 'lodash' import * as mime from 'mime-types' -import type { Binary, Encryption, ObjectMetaData, ResponseHeader } from './type.ts' +import type { Binary, Encryption, Header, ObjectMetaData, ResponseHeader } from './type.ts' import { ENCRYPTION_TYPES } from './type.ts' /** @@ -498,7 +498,7 @@ const ENCRYPTION_HEADERS = { * @param encConfig * 
@returns an object with key value pairs that can be used in headers. */ -export function getEncryptionHeaders(encConfig: Encryption): Record { +export function getEncryptionHeaders(encConfig: Encryption): Header { const encType = encConfig.type const encHeaders = {} if (!isEmpty(encType)) { @@ -510,8 +510,6 @@ export function getEncryptionHeaders(encConfig: Encryption): Record export type ObjectMetaData = Record + +// request header export type Header = Record import type * as http from 'node:http' import type * as https from 'node:https' From 401aded1d5681f736fb74388d47f9acde515a1b0 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sun, 7 May 2023 00:44:09 +0800 Subject: [PATCH 55/78] fix enc headers --- src/internal/helper.ts | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/internal/helper.ts b/src/internal/helper.ts index d645bd20..38397ceb 100644 --- a/src/internal/helper.ts +++ b/src/internal/helper.ts @@ -500,13 +500,11 @@ const ENCRYPTION_HEADERS = { */ export function getEncryptionHeaders(encConfig: Encryption): Header { const encType = encConfig.type - const encHeaders = {} + if (!isEmpty(encType)) { if (encType === ENCRYPTION_TYPES.SSEC) { return { - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - [encHeaders[ENCRYPTION_HEADERS.sseGenericHeader]]: 'AES256', + [ENCRYPTION_HEADERS.sseGenericHeader]: 'AES256', } } else if (encType === ENCRYPTION_TYPES.KMS) { return { @@ -516,7 +514,7 @@ export function getEncryptionHeaders(encConfig: Encryption): Header { } } - return encHeaders + return {} } export function partsRequired(size: number): number { From 826eda16481f73fe6e8c18bc013ee07f1aa6b005 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sun, 7 May 2023 00:54:04 +0800 Subject: [PATCH 56/78] fix enc type --- src/internal/type.ts | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/src/internal/type.ts b/src/internal/type.ts index a26adfc2..7f67ba10 100644 --- a/src/internal/type.ts 
+++ b/src/internal/type.ts @@ -10,11 +10,15 @@ export type Header = Record import type * as http from 'node:http' import type * as https from 'node:https' -export type Encryption = { - type: string - SSEAlgorithm?: string - KMSMasterKeyID?: string -} +export type Encryption = + | { + type: ENCRYPTION_TYPES.SSEC + } + | { + type: ENCRYPTION_TYPES.KMS + SSEAlgorithm?: string + KMSMasterKeyID?: string + } export enum ENCRYPTION_TYPES { /** From 4ec735a13d8b5bc6cbc4467835c30a1e50affd59 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sun, 7 May 2023 01:01:33 +0800 Subject: [PATCH 57/78] import first --- src/internal/type.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/internal/type.ts b/src/internal/type.ts index 7f67ba10..a0e6e301 100644 --- a/src/internal/type.ts +++ b/src/internal/type.ts @@ -1,3 +1,6 @@ +import type * as http from 'node:http' +import type * as https from 'node:https' + export type Binary = string | Buffer // nodejs IncomingHttpHeaders is Record, but it's actually this: @@ -7,8 +10,6 @@ export type ObjectMetaData = Record // request header export type Header = Record -import type * as http from 'node:http' -import type * as https from 'node:https' export type Encryption = | { From e98ed16ddd32ac7d011e113bb09f1cc109b7fdce Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sun, 7 May 2023 01:16:06 +0800 Subject: [PATCH 58/78] import as es --- src/helpers.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/helpers.ts b/src/helpers.ts index d4659045..955d17d4 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -1,7 +1,7 @@ import * as fs from 'node:fs' import * as path from 'node:path' -import querystring from 'query-string' +import * as querystring from 'query-string' import * as errors from './errors.ts' import { From d3cd80ded4aaf4f10e9a319e5882374fcb28d7ac Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sun, 7 May 2023 01:37:10 +0800 Subject: [PATCH 59/78] move to internal --- src/{ => internal}/s3-endpoints.ts | 2 
+- src/minio.d.ts | 4 ++-- src/minio.js | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) rename src/{ => internal}/s3-endpoints.ts (97%) diff --git a/src/s3-endpoints.ts b/src/internal/s3-endpoints.ts similarity index 97% rename from src/s3-endpoints.ts rename to src/internal/s3-endpoints.ts index 18b171ef..141f08ff 100644 --- a/src/s3-endpoints.ts +++ b/src/internal/s3-endpoints.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { isString } from './internal/helper.ts' +import { isString } from './helper.ts' // List of currently supported endpoints. const awsS3Endpoint = { diff --git a/src/minio.d.ts b/src/minio.d.ts index 00c473c5..7c1fac2f 100644 --- a/src/minio.d.ts +++ b/src/minio.d.ts @@ -11,11 +11,11 @@ import type { RETENTION_MODES, RETENTION_VALIDITY_UNITS, } from './helpers.ts' +import type { Region } from './internal/s3-endpoints.ts' import type { Transport } from './internal/type.ts' -import type { Region } from './s3-endpoints.ts' export * from './helpers.ts' -export type { Region } from './s3-endpoints.ts' +export type { Region } from './internal/s3-endpoints.ts' // Exports only from typings export type NotificationEvent = diff --git a/src/minio.js b/src/minio.js index 47d11ee4..34a01a4a 100644 --- a/src/minio.js +++ b/src/minio.js @@ -66,11 +66,11 @@ import { uriEscape, uriResourceEscape, } from './internal/helper.ts' +import { getS3Endpoint } from './internal/s3-endpoints.ts' import { LEGAL_HOLD_STATUS, RETENTION_MODES, RETENTION_VALIDITY_UNITS } from './internal/type.ts' import { NotificationConfig, NotificationPoller } from './notification.js' import { ObjectUploader } from './object-uploader.js' import { promisify } from './promisify.js' -import { getS3Endpoint } from './s3-endpoints.ts' import { postPresignSignatureV4, presignSignatureV4, signV4 } from './signing.js' import * as transformers from './transformers.js' import { parseSelectObjectContentResponse } from './xml-parsers.js' From 
3dadd5e19905a7fdaada1227a040c6cdffcbb699 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sun, 7 May 2023 06:03:16 +0800 Subject: [PATCH 60/78] type name --- src/postPolicy.ts | 4 ++-- src/typedBase.ts | 17 ++++++++++------- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/src/postPolicy.ts b/src/postPolicy.ts index 769c7e66..d383b36c 100644 --- a/src/postPolicy.ts +++ b/src/postPolicy.ts @@ -2,7 +2,7 @@ import { isObject } from './assert.ts' import * as errors from './errors.ts' import { isValidBucketName, isValidObjectName, isValidPrefix } from './internal/helper.ts' -import type { ObjectMetaData as MetaData } from './internal/type.ts' +import type { ObjectMetaData } from './internal/type.ts' export class PostPolicy { public policy: { conditions: (string | number)[][]; expiration?: string } @@ -92,7 +92,7 @@ export class PostPolicy { } // set user defined metadata - setUserMetaData(metaData: MetaData) { + setUserMetaData(metaData: ObjectMetaData) { if (!isObject(metaData)) { throw new TypeError('metadata should be of type "object"') } diff --git a/src/typedBase.ts b/src/typedBase.ts index 59fb5b3a..bc71b7c5 100644 --- a/src/typedBase.ts +++ b/src/typedBase.ts @@ -53,7 +53,7 @@ import { } from './internal/helper.ts' import type { Region } from './internal/s3-endpoints.ts' import { getS3Endpoint } from './internal/s3-endpoints.ts' -import type { ObjectMetaData as MetaData, ResponseHeader } from './internal/type.ts' +import type { ObjectMetaData, ResponseHeader } from './internal/type.ts' import { qs } from './qs.ts' import { drainResponse, readAsBuffer, readAsString } from './response.ts' import { signV4 } from './signing.ts' @@ -1247,7 +1247,7 @@ export class TypedBase { bucketName: string, objectName: string, filePath: string, - metaDataOrCallback?: MetaData, + metaDataOrCallback?: ObjectMetaData, maybeCallback?: NoResultCallback, ) { if (!isValidBucketName(bucketName)) { @@ -1261,7 +1261,10 @@ export class TypedBase { throw new TypeError('filePath 
should be of type "string"') } - let [[metaData = {}], callback] = findCallback<[MetaData], NoResultCallback>([metaDataOrCallback, maybeCallback]) + let [[metaData = {}], callback] = findCallback<[ObjectMetaData], NoResultCallback>([ + metaDataOrCallback, + maybeCallback, + ]) if (!isObject(metaData)) { throw new TypeError('metaData should be of type "object"') @@ -1792,13 +1795,13 @@ export class TypedBase { getUploader( bucketName: string, objectName: string, - metaData: MetaData, + metaData: ObjectMetaData, multipart: false, ): (buf: Buffer, length: number, sha256sum: string, md5sum: string) => Promise getUploader( bucketName: string, objectName: string, - metaData: MetaData, + metaData: ObjectMetaData, multipart: true, ): ( uploadId: string, @@ -1810,7 +1813,7 @@ export class TypedBase { ) => Promise // a part of the multipart. - getUploader(bucketName: string, objectName: string, metaData: MetaData, multipart: boolean) { + getUploader(bucketName: string, objectName: string, metaData: ObjectMetaData, multipart: boolean) { if (!isValidBucketName(bucketName)) { throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) } @@ -1980,7 +1983,7 @@ export async function uploadStream({ client: TypedBase bucketName: string objectName: string - metaData: MetaData + metaData: ObjectMetaData stream: stream.Readable partSize: number }): Promise { From 6b83b7d0f511aead5932ec1a630b401f240292fd Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sun, 7 May 2023 06:09:14 +0800 Subject: [PATCH 61/78] fix arg name --- src/type.ts | 1 - src/typed-client2.ts | 4 ++-- src/typedBase.ts | 9 ++++----- 3 files changed, 6 insertions(+), 8 deletions(-) diff --git a/src/type.ts b/src/type.ts index ebde42e0..72c673db 100644 --- a/src/type.ts +++ b/src/type.ts @@ -3,7 +3,6 @@ import type { Readable as ReadableStream } from 'node:stream' import type { LEGAL_HOLD_STATUS } from './internal/type.ts' import type { RETENTION_MODES } from './internal/type.ts' -export type Binary = string 
| Buffer export type RequestHeaders = Record export interface IRequest { diff --git a/src/typed-client2.ts b/src/typed-client2.ts index 20bcd7c3..182cfc24 100644 --- a/src/typed-client2.ts +++ b/src/typed-client2.ts @@ -850,7 +850,7 @@ export class Client extends TypedClient { } // Ensures Metadata has appropriate prefix for A3 API - metaData = prependXAMZMeta(metaData) + const headers = prependXAMZMeta(metaData) if (typeof stream === 'string' || stream instanceof Buffer) { // Adapts the non-stream interface into a stream. if (size !== undefined) { @@ -907,7 +907,7 @@ export class Client extends TypedClient { partSize, bucketName, objectName, - metaData, + headers, }) } diff --git a/src/typedBase.ts b/src/typedBase.ts index bc71b7c5..23dfb0d5 100644 --- a/src/typedBase.ts +++ b/src/typedBase.ts @@ -53,13 +53,12 @@ import { } from './internal/helper.ts' import type { Region } from './internal/s3-endpoints.ts' import { getS3Endpoint } from './internal/s3-endpoints.ts' -import type { ObjectMetaData, ResponseHeader } from './internal/type.ts' +import type { Binary, ObjectMetaData, ResponseHeader } from './internal/type.ts' import { qs } from './qs.ts' import { drainResponse, readAsBuffer, readAsString } from './response.ts' import { signV4 } from './signing.ts' import * as transformers from './transformers.ts' import type { - Binary, BucketItemFromList, BucketItemStat, GetObjectOpt, @@ -1976,14 +1975,14 @@ export async function uploadStream({ client, bucketName, objectName, - metaData, + headers, stream: source, partSize, }: { client: TypedBase bucketName: string objectName: string - metaData: ObjectMetaData + headers: RequestHeaders stream: stream.Readable partSize: number }): Promise { @@ -1998,7 +1997,7 @@ export async function uploadStream({ const previousUploadId = await client.findUploadId(bucketName, objectName) let uploadId: string if (!previousUploadId) { - uploadId = await client.initiateNewMultipartUpload(bucketName, objectName, metaData) + uploadId = await 
client.initiateNewMultipartUpload(bucketName, objectName, headers) } else { uploadId = previousUploadId const oldTags = await client.listParts(bucketName, objectName, previousUploadId) From cad3f6065121b6d5379576476e89e1a8867e70c0 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sun, 7 May 2023 06:44:50 +0800 Subject: [PATCH 62/78] fix metadata --- src/typed-client2.ts | 2 +- src/typedBase.ts | 25 ++++++------ tests/functional/functional-tests.js | 59 +++++++++++++++------------- 3 files changed, 46 insertions(+), 40 deletions(-) diff --git a/src/typed-client2.ts b/src/typed-client2.ts index 182cfc24..f1faf31f 100644 --- a/src/typed-client2.ts +++ b/src/typed-client2.ts @@ -895,7 +895,7 @@ export class Client extends TypedClient { const partSize = this.calculatePartSize(size) if (typeof stream === 'string' || Buffer.isBuffer(stream) || size <= this.partSize) { - const uploader = this.getUploader(bucketName, objectName, metaData, false) + const uploader = this.getUploader(bucketName, objectName, headers, false) const buf = isReadableStream(stream) ? 
await readAsBuffer(stream) : Buffer.from(stream) const { md5sum, sha256sum } = transformers.hashBinary(buf, this.enableSHA256) return uploader(buf, buf.length, sha256sum, md5sum) diff --git a/src/typedBase.ts b/src/typedBase.ts index 23dfb0d5..310ef6cb 100644 --- a/src/typedBase.ts +++ b/src/typedBase.ts @@ -11,6 +11,7 @@ import BlockStream2 from 'block-stream2' import { isBrowser } from 'browser-or-node' import _ from 'lodash' import { mkdirp } from 'mkdirp' +import * as querystring from 'query-string' import xml2js from 'xml2js' import { asCallback, asCallbackFn } from './as-callback.ts' @@ -424,12 +425,14 @@ export class TypedBase { // Use any request option specified in minioClient.setRequestOptions() reqOptions = Object.assign({}, this.reqOptions, reqOptions) + const reqHeaders = _.mapValues( + Object.fromEntries(Object.entries(reqOptions.headers).filter(([_, value]) => value !== undefined)), + (v) => v?.toString(), + ) as Record + return { ...reqOptions, - headers: _.mapValues( - Object.fromEntries(Object.entries(reqOptions.headers).filter(([_, value]) => value !== undefined)), - (v) => v?.toString(), - ) as Record, + headers: reqHeaders, host, port, path, @@ -618,7 +621,7 @@ export class TypedBase { * * @internal */ - makeRequestStreamAsync( + async makeRequestStreamAsync( options: RequestOption, stream: stream.Readable | Buffer, sha256sum: string, @@ -1769,7 +1772,7 @@ export class TypedBase { throw new TypeError('callback should be of type "function"') } - const query = qs(statOpts) + const query = querystring.stringify(statOpts) const method = 'HEAD' return asCallbackFn(cb, async () => { const res = await this.makeRequestAsync({ method, bucketName, objectName, query }) @@ -1794,13 +1797,13 @@ export class TypedBase { getUploader( bucketName: string, objectName: string, - metaData: ObjectMetaData, + extraHeaders: RequestHeaders, multipart: false, ): (buf: Buffer, length: number, sha256sum: string, md5sum: string) => Promise getUploader( bucketName: string, 
objectName: string, - metaData: ObjectMetaData, + extraHeaders: RequestHeaders, multipart: true, ): ( uploadId: string, @@ -1812,7 +1815,7 @@ export class TypedBase { ) => Promise // a part of the multipart. - getUploader(bucketName: string, objectName: string, metaData: ObjectMetaData, multipart: boolean) { + getUploader(bucketName: string, objectName: string, extraHeaders: RequestHeaders, multipart: boolean) { if (!isValidBucketName(bucketName)) { throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) } @@ -1822,7 +1825,7 @@ export class TypedBase { if (!isBoolean(multipart)) { throw new TypeError('multipart should be of type "boolean"') } - if (!isObject(metaData)) { + if (!isObject(extraHeaders)) { throw new TypeError('metadata should be of type "object"') } @@ -1876,7 +1879,7 @@ export class TypedBase { let headers: RequestHeaders = { 'Content-Length': length } if (!multipart) { - headers = Object.assign({}, metaData, headers) + headers = Object.assign({}, extraHeaders, headers) } if (!this.enableSHA256) { diff --git a/tests/functional/functional-tests.js b/tests/functional/functional-tests.js index 84331aeb..1200ebfb 100644 --- a/tests/functional/functional-tests.js +++ b/tests/functional/functional-tests.js @@ -539,18 +539,23 @@ describe('functional tests', function () { step( `putObject(bucketName, objectName, stream, metadata, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, - (done) => { - var stream = readableStream(_65mb) - client.putObject(bucketName, _65mbObjectName, stream, metaData, () => { - setTimeout(() => { - if (Object.values(httpAgent.sockets).length === 0) { - return done() - } - done(new Error('http request did not release network socket')) - }, 100) - }) + async () => { + const stream = readableStream(_65mb) + await client.putObject(bucketName, _65mbObjectName, stream, metaData) + + for (;;) { + await new Promise((resolve) => { + setTimeout(() => { + resolve() + }), + 100 + }) + if 
(Object.values(httpAgent.sockets).length === 0) { + return + } + } }, - ).timeout(5000) + ).timeout(15000) step(`getObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, (done) => { var hash = crypto.createHash('md5') @@ -690,6 +695,21 @@ describe('functional tests', function () { }, ) + step( + `statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, + async () => { + const stat = await client.statObject(bucketName, _100kbObjectName) + if (stat.size !== _100kb.length) { + throw new Error('size mismatch') + } + assert.equal(stat.metaData['content-type'], metaData['Content-Type']) + assert.equal(stat.metaData['Testing'], metaData['Testing']) + assert.equal(stat.metaData['randomstuff'], metaData['randomstuff']) + etag = stat.etag + modifiedDate = stat.modifiedDate + }, + ) + step( `copyObject(bucketName, objectName, srcObject, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}, srcObject:/${bucketName}/${_100kbObjectName}_`, (done) => { @@ -702,23 +722,6 @@ describe('functional tests', function () { }, ) - step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => { - client.statObject(bucketName, _100kbObjectName, (e, stat) => { - if (e) { - return done(e) - } - if (stat.size !== _100kb.length) { - return done(new Error('size mismatch')) - } - assert.equal(stat.metaData['content-type'], metaData['Content-Type']) - assert.equal(stat.metaData['Testing'], metaData['Testing']) - assert.equal(stat.metaData['randomstuff'], metaData['randomstuff']) - etag = stat.etag - modifiedDate = stat.modifiedDate - done() - }) - }) - step( `copyObject(bucketName, objectName, srcObject, conditions, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}, srcObject:/${bucketName}/${_100kbObjectName}, conditions:ExceptIncorrectEtag_`, (done) => { From 535cbb80ddd60027e6fc3e98b310bb1ad6d9fdc5 Mon Sep 17 00:00:00 2001 From: Trim21 
Date: Sun, 7 May 2023 06:58:02 +0800 Subject: [PATCH 63/78] fix --- src/assert.ts | 4 ++++ src/typedBase.ts | 8 ++------ 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/assert.ts b/src/assert.ts index 521f94dd..51a34995 100644 --- a/src/assert.ts +++ b/src/assert.ts @@ -78,3 +78,7 @@ export function isValidDate(arg: unknown): arg is Date { // @ts-expect-error TS(2345): Argument of type 'Date' is not assignable to param... Remove this comment to see the full error message return arg instanceof Date && !isNaN(arg) } + +export function isDefined(o: T): o is NonNullable { + return o !== null && o !== undefined +} diff --git a/src/typedBase.ts b/src/typedBase.ts index 310ef6cb..e83c711b 100644 --- a/src/typedBase.ts +++ b/src/typedBase.ts @@ -18,6 +18,7 @@ import { asCallback, asCallbackFn } from './as-callback.ts' import type { AnyFunction } from './assert.ts' import { isBoolean, + isDefined, isEmpty, isFunction, isNumber, @@ -425,14 +426,9 @@ export class TypedBase { // Use any request option specified in minioClient.setRequestOptions() reqOptions = Object.assign({}, this.reqOptions, reqOptions) - const reqHeaders = _.mapValues( - Object.fromEntries(Object.entries(reqOptions.headers).filter(([_, value]) => value !== undefined)), - (v) => v?.toString(), - ) as Record - return { ...reqOptions, - headers: reqHeaders, + headers: _.mapValues(_.pickBy(reqOptions.headers, isDefined), (v) => v.toString()), host, port, path, From 76afbc318f62295670745ed974dc81b13eb95f81 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sun, 7 May 2023 07:10:56 +0800 Subject: [PATCH 64/78] move files --- src/{ => internal}/async.ts | 2 +- src/request.ts | 29 ----------------------------- src/response.ts | 26 -------------------------- src/typed-client.ts | 4 ++-- src/typed-client2.ts | 4 ++-- src/typedBase.ts | 4 ++-- 6 files changed, 7 insertions(+), 62 deletions(-) rename src/{ => internal}/async.ts (94%) delete mode 100644 src/request.ts delete mode 100644 src/response.ts diff 
--git a/src/async.ts b/src/internal/async.ts similarity index 94% rename from src/async.ts rename to src/internal/async.ts index b3ac9cb7..0eca5ffd 100644 --- a/src/async.ts +++ b/src/internal/async.ts @@ -1,4 +1,4 @@ -// promise helper for stdlibl +// promise helper for stdlib import * as fs from 'node:fs' import * as stream from 'node:stream' diff --git a/src/request.ts b/src/request.ts deleted file mode 100644 index 6846f6fc..00000000 --- a/src/request.ts +++ /dev/null @@ -1,29 +0,0 @@ -import * as http from 'node:http' -import * as https from 'node:https' -import type * as stream from 'node:stream' - -export async function request( - opt: https.RequestOptions, - isHttp: boolean, - body: Buffer | string | stream.Readable | undefined = undefined, -): Promise { - const transport = isHttp ? http : https - - return new Promise((resolve, reject) => { - const requestObj = transport.request(opt, (resp) => { - resolve(resp) - }) - - requestObj.on('error', (e: unknown) => { - reject(e) - }) - - if (body) { - if (!Buffer.isBuffer(body) && typeof body !== 'string') { - body.on('error', reject) - } - - requestObj.end(body) - } - }) -} diff --git a/src/response.ts b/src/response.ts deleted file mode 100644 index bb3a0b15..00000000 --- a/src/response.ts +++ /dev/null @@ -1,26 +0,0 @@ -import type http from 'node:http' -import type stream from 'node:stream' - -export async function readAsBuffer(res: stream.Readable): Promise { - return new Promise((resolve, reject) => { - const body: Buffer[] = [] - res - .on('data', (chunk: Buffer) => body.push(chunk)) - .on('error', (e) => reject(e)) - .on('end', () => resolve(Buffer.concat(body))) - }) -} - -export async function readAsString(res: http.IncomingMessage): Promise { - const body = await readAsBuffer(res) - return body.toString() -} - -export async function drainResponse(res: stream.Readable): Promise { - return new Promise((resolve, reject) => { - res - .on('data', () => {}) - .on('error', (e) => reject(e)) - .on('end', () => 
resolve()) - }) -} diff --git a/src/typed-client.ts b/src/typed-client.ts index b5967926..4d7d25d1 100644 --- a/src/typed-client.ts +++ b/src/typed-client.ts @@ -14,10 +14,10 @@ import { isString, isValidDate, } from './assert.ts' -import { fsp } from './async.ts' import * as errors from './errors.ts' import type { SelectResults } from './helpers.ts' import { LEGAL_HOLD_STATUS, RETENTION_MODES } from './helpers.ts' +import { fsp } from './internal/async.ts' import { getScope, insertContentType, @@ -29,10 +29,10 @@ import { toMd5, uriEscape, } from './internal/helper.ts' +import { readAsBuffer } from './internal/response.ts' import type { ObjectMetaData as MetaData } from './internal/type.ts' import { PostPolicy } from './postPolicy.ts' import { qs } from './qs.ts' -import { readAsBuffer } from './response.ts' import { postPresignSignatureV4, presignSignatureV4 } from './signing.ts' import * as transformers from './transformers.ts' import type { diff --git a/src/typed-client2.ts b/src/typed-client2.ts index f1faf31f..7735f2c4 100644 --- a/src/typed-client2.ts +++ b/src/typed-client2.ts @@ -15,10 +15,10 @@ import { isReadableStream, isString, } from './assert.ts' -import { fsp } from './async.ts' import { CopyConditions } from './copyConditions.ts' import * as errors from './errors.ts' import { CopyDestinationOptions, CopySourceOptions } from './helpers.ts' +import { fsp } from './internal/async.ts' import { calculateEvenSplits, extractMetadata, @@ -37,11 +37,11 @@ import { uriEscape, uriResourceEscape, } from './internal/helper.ts' +import { readAsBuffer } from './internal/response.ts' import type { ObjectMetaData, ResponseHeader } from './internal/type.ts' import { RETENTION_MODES, RETENTION_VALIDITY_UNITS } from './internal/type.ts' import type { NotificationEvent } from './notification.ts' import { NotificationConfig, NotificationPoller } from './notification.ts' -import { readAsBuffer } from './response.ts' import * as transformers from './transformers.ts' 
import type { BucketItemCopy, diff --git a/src/typedBase.ts b/src/typedBase.ts index e83c711b..14db1d53 100644 --- a/src/typedBase.ts +++ b/src/typedBase.ts @@ -27,12 +27,12 @@ import { isReadableStream, isString, } from './assert.ts' -import { fsp, streamPromise } from './async.ts' import { CredentialProvider } from './CredentialProvider.ts' import * as errors from './errors.ts' import { S3Error } from './errors.ts' import { extensions } from './extensions.ts' import { DEFAULT_REGION } from './helpers.ts' +import { fsp, streamPromise } from './internal/async.ts' import { extractMetadata, getVersionId, @@ -53,11 +53,11 @@ import { uriEscape, uriResourceEscape, } from './internal/helper.ts' +import { drainResponse, readAsBuffer, readAsString } from './internal/response.ts' import type { Region } from './internal/s3-endpoints.ts' import { getS3Endpoint } from './internal/s3-endpoints.ts' import type { Binary, ObjectMetaData, ResponseHeader } from './internal/type.ts' import { qs } from './qs.ts' -import { drainResponse, readAsBuffer, readAsString } from './response.ts' import { signV4 } from './signing.ts' import * as transformers from './transformers.ts' import type { From 1561886cab53be6bf536b1f0cfc2c5ab248c903e Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sun, 7 May 2023 07:19:23 +0800 Subject: [PATCH 65/78] fix --- src/helpers.ts | 7 +- src/internal/helper.ts | 4 +- src/internal/type.ts | 234 ++++++++++++++++++++++++++++++++++++++- src/signing.ts | 2 +- src/type.ts | 241 ----------------------------------------- src/typed-client.ts | 12 +- src/typed-client2.ts | 13 ++- src/typedBase.ts | 12 +- src/xml-parsers.ts | 9 +- 9 files changed, 264 insertions(+), 270 deletions(-) delete mode 100644 src/type.ts diff --git a/src/helpers.ts b/src/helpers.ts index e54fd93c..b6b03be4 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -14,9 +14,8 @@ import { isValidBucketName, isValidObjectName, } from './internal/helper.ts' -import type { Encryption, Header, ObjectMetaData } 
from './internal/type.ts' +import type { Encryption, ObjectMetaData, RequestHeaders, RequestHeaders } from './internal/type.ts' import { RETENTION_MODES } from './internal/type.ts' -import type { RequestHeaders } from './type.ts' export { ENCRYPTION_TYPES, LEGAL_HOLD_STATUS, RETENTION_MODES, RETENTION_VALIDITY_UNITS } from './internal/type.ts' @@ -105,8 +104,8 @@ export class CopySourceOptions { return true } - getHeaders(): Header { - const headerOptions: Header = {} + getHeaders(): RequestHeaders { + const headerOptions: RequestHeaders = {} headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) if (!isEmpty(this.VersionID)) { diff --git a/src/internal/helper.ts b/src/internal/helper.ts index 38397ceb..25c46251 100644 --- a/src/internal/helper.ts +++ b/src/internal/helper.ts @@ -23,7 +23,7 @@ import ipaddr from 'ipaddr.js' import _ from 'lodash' import * as mime from 'mime-types' -import type { Binary, Encryption, Header, ObjectMetaData, ResponseHeader } from './type.ts' +import type { Binary, Encryption, ObjectMetaData, RequestHeaders, ResponseHeader } from './type.ts' import { ENCRYPTION_TYPES } from './type.ts' /** @@ -498,7 +498,7 @@ const ENCRYPTION_HEADERS = { * @param encConfig * @returns an object with key value pairs that can be used in headers. 
*/ -export function getEncryptionHeaders(encConfig: Encryption): Header { +export function getEncryptionHeaders(encConfig: Encryption): RequestHeaders { const encType = encConfig.type if (!isEmpty(encType)) { diff --git a/src/internal/type.ts b/src/internal/type.ts index a0e6e301..7ef6bc56 100644 --- a/src/internal/type.ts +++ b/src/internal/type.ts @@ -1,5 +1,6 @@ import type * as http from 'node:http' import type * as https from 'node:https' +import type { Readable as ReadableStream } from 'node:stream' export type Binary = string | Buffer @@ -8,9 +9,6 @@ export type ResponseHeader = Record export type ObjectMetaData = Record -// request header -export type Header = Record - export type Encryption = | { type: ENCRYPTION_TYPES.SSEC @@ -48,3 +46,233 @@ export enum LEGAL_HOLD_STATUS { } export type Transport = typeof http | typeof https +export type RequestHeaders = Record + +export interface UploadedObjectInfo { + etag: string + versionId: string | null +} + +export interface IRequest { + protocol: string + port?: number | string + method: string + path: string + headers: RequestHeaders +} + +export type ICanonicalRequest = string + +export interface ICredentials { + accessKey: string + secretKey: string + sessionToken?: string +} + +export type UploadID = string +export type LegalHoldStatus = 'ON' | 'OFF' +export type NoResultCallback = (error: unknown | null) => void +export type ResultCallback = (error: unknown | null, result: T) => void +export type TagList = Record +export type EmptyObject = Record +export type VersionIdentification = { versionId?: string } +export type Lifecycle = LifecycleConfig | null | '' +export type Lock = LockConfig | EmptyObject +export type Retention = RetentionOptions | EmptyObject +export type IsoDate = string +export type GetObjectOpt = { + versionId?: string +} + +export interface BucketItemCopy { + etag: string + lastModified?: Date +} + +export interface BucketItem { + name: string + prefix: string + size: number + etag: string 
+ lastModified: Date +} + +export interface BucketItemWithMetadata extends BucketItem { + metadata: ItemBucketMetadata | ItemBucketMetadataList +} + +export type StatObjectOpts = { + versionId?: string +} + +export interface BucketItemStat { + size: number + etag: string + lastModified: Date + metaData: ItemBucketMetadata + // version id of the object if available + versionId: string | null +} + +export interface IncompleteUploadedBucketItem { + key: string + uploadId: string + size: number +} + +export interface BucketStream extends ReadableStream { + on(event: 'data', listener: (item: T) => void): this + + on(event: 'end' | 'pause' | 'readable' | 'resume' | 'close', listener: () => void): this + + on(event: 'error', listener: (err: Error) => void): this + + on(event: string | symbol, listener: (...args: any[]) => void): this +} + +export interface PostPolicyResult { + postURL: string + formData: { + [key: string]: any + } +} + +export interface MetadataItem { + Key: string + Value: string +} + +export interface ItemBucketMetadataList { + Items: MetadataItem[] +} + +export interface ItemBucketMetadata { + [key: string]: any +} + +export interface Tag { + Key: string + Value: string +} + +export interface LifecycleConfig { + Rule: LifecycleRule[] +} + +export interface LifecycleRule { + [key: string]: any +} + +export interface LockConfig { + objectLockEnabled?: 'Enabled' + mode: LEGAL_HOLD_STATUS + unit: RETENTION_VALIDITY_UNITS + validity: number +} + +export interface EncryptionConfig { + Rule: EncryptionRule[] +} + +export interface EncryptionRule { + [key: string]: any +} + +export interface ReplicationConfig { + role: string + rules: [] +} + +export interface ReplicationConfig { + [key: string]: any +} + +export interface RetentionOptions { + versionId: string + mode?: RETENTION_MODES + retainUntilDate?: IsoDate + governanceBypass?: boolean +} + +export interface LegalHoldOptions { + versionId?: string + status: LEGAL_HOLD_STATUS +} + +export interface 
InputSerialization { + CompressionType?: 'NONE' | 'GZIP' | 'BZIP2' + CSV?: { + AllowQuotedRecordDelimiter?: boolean + Comments?: string + FieldDelimiter?: string + FileHeaderInfo?: 'NONE' | 'IGNORE' | 'USE' + QuoteCharacter?: string + QuoteEscapeCharacter?: string + RecordDelimiter?: string + } + JSON?: { + Type: 'DOCUMENT' | 'LINES' + } + Parquet?: EmptyObject +} + +export interface OutputSerialization { + CSV?: { + FieldDelimiter?: string + QuoteCharacter?: string + QuoteEscapeCharacter?: string + QuoteFields?: string + RecordDelimiter?: string + } + JSON?: { + RecordDelimiter?: string + } +} + +export interface SelectOptions { + expression: string + expressionType?: string + inputSerialization: InputSerialization + outputSerialization: OutputSerialization + requestProgress?: { Enabled: boolean } + scanRange?: { Start: number; End: number } +} + +export interface SourceObjectStats { + size: number + metaData: string + lastModicied: Date + versionId: string + etag: string +} + +export interface MakeBucketOpt { + ObjectLocking?: boolean +} + +export interface RemoveOptions { + versionId?: string + forceDelete?: boolean + governanceBypass?: boolean +} + +export interface BucketItemFromList { + name: string + // date when bucket was created + creationDate: Date +} + +export type VersioningConfig = Record + +export interface VersionConfigInput { + Status?: string + MfaDelete?: string + + [key: string]: any +} + +export type ListObjectV1Opt = { + Delimiter?: string + MaxKeys?: number + IncludeVersion?: boolean +} diff --git a/src/signing.ts b/src/signing.ts index e894dcbc..33323a39 100644 --- a/src/signing.ts +++ b/src/signing.ts @@ -19,7 +19,7 @@ import * as Crypto from 'node:crypto' import { isNumber, isObject, isString } from './assert.ts' import * as errors from './errors.ts' import { getScope, makeDateLong, makeDateShort, uriEscape } from './internal/helper.ts' -import type { ICanonicalRequest, IRequest, RequestHeaders } from './type.ts' +import type { 
ICanonicalRequest, IRequest, RequestHeaders } from './internal/type.ts' const signV4Algorithm = 'AWS4-HMAC-SHA256' diff --git a/src/type.ts b/src/type.ts deleted file mode 100644 index 72c673db..00000000 --- a/src/type.ts +++ /dev/null @@ -1,241 +0,0 @@ -import type { Readable as ReadableStream } from 'node:stream' - -import type { LEGAL_HOLD_STATUS } from './internal/type.ts' -import type { RETENTION_MODES } from './internal/type.ts' - -export type RequestHeaders = Record - -export interface IRequest { - protocol: string - port?: number | string - method: string - path: string - headers: RequestHeaders -} - -export type ICanonicalRequest = string - -export interface ICredentials { - accessKey: string - secretKey: string - sessionToken?: string -} - -export type UploadID = string - -export type LockUnit = 'Days' | 'Years' -export type LegalHoldStatus = 'ON' | 'OFF' -export type NoResultCallback = (error: unknown | null) => void -export type ResultCallback = (error: unknown | null, result: T) => void -export type TagList = Record -export type EmptyObject = Record -export type VersionIdentification = { versionId?: string } -export type Lifecycle = LifecycleConfig | null | '' -export type Lock = LockConfig | EmptyObject -export type Encryption = EncryptionConfig | EmptyObject -export type Retention = RetentionOptions | EmptyObject -export type IsoDate = string - -export type GetObjectOpt = { - versionId?: string -} - -export interface BucketItemCopy { - etag: string - lastModified?: Date -} - -export interface BucketItem { - name: string - prefix: string - size: number - etag: string - lastModified: Date -} - -export interface BucketItemWithMetadata extends BucketItem { - metadata: ItemBucketMetadata | ItemBucketMetadataList -} - -export type StatObjectOpts = { - versionId?: string -} - -export interface BucketItemStat { - size: number - etag: string - lastModified: Date - metaData: ItemBucketMetadata - // version id of the object if available - versionId: string | 
null -} - -export interface IncompleteUploadedBucketItem { - key: string - uploadId: string - size: number -} - -export interface BucketStream extends ReadableStream { - on(event: 'data', listener: (item: T) => void): this - - on(event: 'end' | 'pause' | 'readable' | 'resume' | 'close', listener: () => void): this - - on(event: 'error', listener: (err: Error) => void): this - - on(event: string | symbol, listener: (...args: any[]) => void): this -} - -export interface PostPolicyResult { - postURL: string - formData: { - [key: string]: any - } -} - -export interface MetadataItem { - Key: string - Value: string -} - -export interface ItemBucketMetadataList { - Items: MetadataItem[] -} - -export interface ItemBucketMetadata { - [key: string]: any -} - -export interface UploadedObjectInfo { - etag: string - versionId: string | null -} - -export interface Tag { - Key: string - Value: string -} - -export interface LifecycleConfig { - Rule: LifecycleRule[] -} - -export interface LifecycleRule { - [key: string]: any -} - -export interface LockConfig { - objectLockEnabled?: 'Enabled' - mode: Mode - unit: LockUnit - validity: number -} - -export interface EncryptionConfig { - Rule: EncryptionRule[] -} - -export interface EncryptionRule { - [key: string]: any -} - -export interface ReplicationConfig { - role: string - rules: [] -} - -export interface ReplicationConfig { - [key: string]: any -} - -export interface RetentionOptions { - versionId: string - mode?: RETENTION_MODES - retainUntilDate?: IsoDate - governanceBypass?: boolean -} - -export interface LegalHoldOptions { - versionId?: string - status: LEGAL_HOLD_STATUS -} - -export interface InputSerialization { - CompressionType?: 'NONE' | 'GZIP' | 'BZIP2' - CSV?: { - AllowQuotedRecordDelimiter?: boolean - Comments?: string - FieldDelimiter?: string - FileHeaderInfo?: 'NONE' | 'IGNORE' | 'USE' - QuoteCharacter?: string - QuoteEscapeCharacter?: string - RecordDelimiter?: string - } - JSON?: { - Type: 'DOCUMENT' | 'LINES' - 
} - Parquet?: EmptyObject -} - -export interface OutputSerialization { - CSV?: { - FieldDelimiter?: string - QuoteCharacter?: string - QuoteEscapeCharacter?: string - QuoteFields?: string - RecordDelimiter?: string - } - JSON?: { - RecordDelimiter?: string - } -} - -export interface SelectOptions { - expression: string - expressionType?: string - inputSerialization: InputSerialization - outputSerialization: OutputSerialization - requestProgress?: { Enabled: boolean } - scanRange?: { Start: number; End: number } -} - -export interface SourceObjectStats { - size: number - metaData: string - lastModicied: Date - versionId: string - etag: string -} - -export interface MakeBucketOpt { - ObjectLocking?: boolean -} - -export interface RemoveOptions { - versionId?: string - forceDelete?: boolean - governanceBypass?: boolean -} - -export interface BucketItemFromList { - name: string - // date when bucket was created - creationDate: Date -} - -export type VersioningConfig = Record - -export interface VersionConfigInput { - Status?: string - MfaDelete?: string - - [key: string]: any -} - -export type Mode = 'COMPLIANCE' | 'GOVERNANCE' - -export type ListObjectV1Opt = { - Delimiter?: string - MaxKeys?: number - IncludeVersion?: boolean -} diff --git a/src/typed-client.ts b/src/typed-client.ts index 4d7d25d1..7c670809 100644 --- a/src/typed-client.ts +++ b/src/typed-client.ts @@ -30,11 +30,6 @@ import { uriEscape, } from './internal/helper.ts' import { readAsBuffer } from './internal/response.ts' -import type { ObjectMetaData as MetaData } from './internal/type.ts' -import { PostPolicy } from './postPolicy.ts' -import { qs } from './qs.ts' -import { postPresignSignatureV4, presignSignatureV4 } from './signing.ts' -import * as transformers from './transformers.ts' import type { BucketStream, Encryption, @@ -42,6 +37,7 @@ import type { Lifecycle, ListObjectV1Opt, NoResultCallback, + ObjectMetaData as MetaData, PostPolicyResult, RemoveOptions, RequestHeaders, @@ -54,7 +50,11 @@ 
import type { VersionConfigInput, VersionIdentification, VersioningConfig, -} from './type.ts' +} from './internal/type.ts' +import { PostPolicy } from './postPolicy.ts' +import { qs } from './qs.ts' +import { postPresignSignatureV4, presignSignatureV4 } from './signing.ts' +import * as transformers from './transformers.ts' import type { RequestMethod, RequestOption } from './typedBase.ts' import { findCallback, TypedBase } from './typedBase.ts' import type { S3ListObject } from './xml-parsers.ts' diff --git a/src/typed-client2.ts b/src/typed-client2.ts index 7735f2c4..2ee2eacf 100644 --- a/src/typed-client2.ts +++ b/src/typed-client2.ts @@ -38,19 +38,20 @@ import { uriResourceEscape, } from './internal/helper.ts' import { readAsBuffer } from './internal/response.ts' -import type { ObjectMetaData, ResponseHeader } from './internal/type.ts' -import { RETENTION_MODES, RETENTION_VALIDITY_UNITS } from './internal/type.ts' -import type { NotificationEvent } from './notification.ts' -import { NotificationConfig, NotificationPoller } from './notification.ts' -import * as transformers from './transformers.ts' import type { BucketItemCopy, NoResultCallback, + ObjectMetaData, RequestHeaders, + ResponseHeader, ResultCallback, SourceObjectStats, UploadedObjectInfo, -} from './type.ts' +} from './internal/type.ts' +import { RETENTION_MODES, RETENTION_VALIDITY_UNITS } from './internal/type.ts' +import type { NotificationEvent } from './notification.ts' +import { NotificationConfig, NotificationPoller } from './notification.ts' +import * as transformers from './transformers.ts' import { TypedClient } from './typed-client.ts' import type { RequestOption } from './typedBase.ts' import { findCallback, uploadStream } from './typedBase.ts' diff --git a/src/typedBase.ts b/src/typedBase.ts index 14db1d53..c886c14f 100644 --- a/src/typedBase.ts +++ b/src/typedBase.ts @@ -56,22 +56,24 @@ import { import { drainResponse, readAsBuffer, readAsString } from './internal/response.ts' import 
type { Region } from './internal/s3-endpoints.ts' import { getS3Endpoint } from './internal/s3-endpoints.ts' -import type { Binary, ObjectMetaData, ResponseHeader } from './internal/type.ts' -import { qs } from './qs.ts' -import { signV4 } from './signing.ts' -import * as transformers from './transformers.ts' import type { + Binary, BucketItemFromList, BucketItemStat, GetObjectOpt, IRequest, MakeBucketOpt, NoResultCallback, + ObjectMetaData, RequestHeaders, + ResponseHeader, ResultCallback, StatObjectOpts, UploadedObjectInfo, -} from './type.ts' +} from './internal/type.ts' +import { qs } from './qs.ts' +import { signV4 } from './signing.ts' +import * as transformers from './transformers.ts' import type { Part } from './xml-parsers.ts' import * as xmlParsers from './xml-parsers.ts' diff --git a/src/xml-parsers.ts b/src/xml-parsers.ts index 80f42ceb..9223ab17 100644 --- a/src/xml-parsers.ts +++ b/src/xml-parsers.ts @@ -22,8 +22,13 @@ import * as errors from './errors.ts' import type { RETENTION_MODES } from './helpers.ts' import { RETENTION_VALIDITY_UNITS, SelectResults } from './helpers.ts' import { parseXml, sanitizeETag, sanitizeObjectKey, toArray } from './internal/helper.ts' -import type { ObjectMetaData as MetaData } from './internal/type.ts' -import type { BucketItemCopy, BucketItemFromList, Retention, UploadID } from './type.ts' +import type { + BucketItemCopy, + BucketItemFromList, + ObjectMetaData as MetaData, + Retention, + UploadID, +} from './internal/type.ts' const fxp = new XMLParser() From 96b07dea3d141159d8dc48df0b4f51c13c4f8845 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sun, 7 May 2023 07:20:35 +0800 Subject: [PATCH 66/78] fix --- src/helpers.ts | 2 +- src/internal/type.ts | 2 +- src/typed-client.ts | 18 +++++++++--------- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/helpers.ts b/src/helpers.ts index b6b03be4..665d1a74 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -14,7 +14,7 @@ import { isValidBucketName, 
isValidObjectName, } from './internal/helper.ts' -import type { Encryption, ObjectMetaData, RequestHeaders, RequestHeaders } from './internal/type.ts' +import type { Encryption, ObjectMetaData, RequestHeaders } from './internal/type.ts' import { RETENTION_MODES } from './internal/type.ts' export { ENCRYPTION_TYPES, LEGAL_HOLD_STATUS, RETENTION_MODES, RETENTION_VALIDITY_UNITS } from './internal/type.ts' diff --git a/src/internal/type.ts b/src/internal/type.ts index 7ef6bc56..65c1662d 100644 --- a/src/internal/type.ts +++ b/src/internal/type.ts @@ -171,7 +171,7 @@ export interface LockConfig { } export interface EncryptionConfig { - Rule: EncryptionRule[] + Rule?: EncryptionRule[] } export interface EncryptionRule { diff --git a/src/typed-client.ts b/src/typed-client.ts index 7c670809..4e6d1484 100644 --- a/src/typed-client.ts +++ b/src/typed-client.ts @@ -32,7 +32,7 @@ import { import { readAsBuffer } from './internal/response.ts' import type { BucketStream, - Encryption, + EncryptionConfig, LegalHoldOptions, Lifecycle, ListObjectV1Opt, @@ -601,9 +601,9 @@ export class TypedClient extends TypedBase { }) } - getBucketEncryption(bucketName: string, callback: ResultCallback): void - getBucketEncryption(bucketName: string): Promise - getBucketEncryption(bucketName: string, cb?: ResultCallback): void | Promise { + getBucketEncryption(bucketName: string, callback: ResultCallback): void + getBucketEncryption(bucketName: string): Promise + getBucketEncryption(bucketName: string, cb?: ResultCallback): void | Promise { if (!isValidBucketName(bucketName)) { throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) } @@ -620,18 +620,18 @@ export class TypedClient extends TypedBase { }) } - setBucketEncryption(bucketName: string, encryptionConfig: Encryption, callback: NoResultCallback): void - setBucketEncryption(bucketName: string, encryptionConfig: Encryption): Promise + setBucketEncryption(bucketName: string, encryptionConfig: EncryptionConfig, callback: 
NoResultCallback): void + setBucketEncryption(bucketName: string, encryptionConfig: EncryptionConfig): Promise setBucketEncryption( bucketName: string, - encryptionConfigOrCallback: Encryption | NoResultCallback | undefined, + encryptionConfigOrCallback: EncryptionConfig | NoResultCallback | undefined, callback?: NoResultCallback, ): void | Promise { if (!isValidBucketName(bucketName)) { throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) } - let encryptionConfig: Encryption | undefined + let encryptionConfig: EncryptionConfig | undefined let cb: NoResultCallback | undefined if (isFunction(encryptionConfigOrCallback)) { @@ -642,7 +642,7 @@ export class TypedClient extends TypedBase { cb = callback } - if (!isEmpty(encryptionConfig) && encryptionConfig.Rule.length > 1) { + if (encryptionConfig && encryptionConfig.Rule && encryptionConfig.Rule.length > 1) { throw new errors.InvalidArgumentError('Invalid Rule length. Only one rule is allowed: ' + encryptionConfig.Rule) } if (!isOptionalFunction(cb)) { From ab54a82099db2ce28ebf5acb815b13f738eaf407 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sun, 7 May 2023 07:21:24 +0800 Subject: [PATCH 67/78] type-name --- src/typed-client.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/typed-client.ts b/src/typed-client.ts index 4e6d1484..cba9c75b 100644 --- a/src/typed-client.ts +++ b/src/typed-client.ts @@ -37,7 +37,7 @@ import type { Lifecycle, ListObjectV1Opt, NoResultCallback, - ObjectMetaData as MetaData, + ObjectMetaData, PostPolicyResult, RemoveOptions, RequestHeaders, @@ -1585,7 +1585,7 @@ export class Helper { bucketName: string, objectName: string, filePath: string, - metaData: MetaData = {}, + metaData: ObjectMetaData = {}, ): Promise { if (!isValidBucketName(bucketName)) { throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) From b00873e65d5222fc4faf871099830da321add935 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Tue, 9 May 2023 09:32:18 
+0800 Subject: [PATCH 68/78] use eventemitter3 --- package-lock.json | 7 +++++++ package.json | 1 + src/notification.ts | 8 +++++--- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 4c1e1d0d..e2b40e88 100644 --- a/package-lock.json +++ b/package-lock.json @@ -55,6 +55,7 @@ "eslint-plugin-simple-import-sort": "^10.0.0", "eslint-plugin-unicorn": "^47.0.0", "eslint-plugin-unused-imports": "^2.0.0", + "eventemitter3": "^5.0.1", "husky": "^8.0.3", "lint-staged": "^13.2.2", "mocha": "^9.2.2", @@ -4165,6 +4166,12 @@ "node": ">=0.10.0" } }, + "node_modules/eventemitter3": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", + "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", + "dev": true + }, "node_modules/execa": { "version": "7.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-7.1.1.tgz", diff --git a/package.json b/package.json index 0577b87e..f6c3d34e 100644 --- a/package.json +++ b/package.json @@ -130,6 +130,7 @@ "eslint-plugin-simple-import-sort": "^10.0.0", "eslint-plugin-unicorn": "^47.0.0", "eslint-plugin-unused-imports": "^2.0.0", + "eventemitter3": "^5.0.1", "husky": "^8.0.3", "lint-staged": "^13.2.2", "mocha": "^9.2.2", diff --git a/src/notification.ts b/src/notification.ts index 002392af..325b765d 100644 --- a/src/notification.ts +++ b/src/notification.ts @@ -14,8 +14,7 @@ * limitations under the License. */ -import { EventEmitter } from 'node:events' - +import { EventEmitter } from 'eventemitter3' import jsonLineParser from 'stream-json/jsonl/Parser.js' import { DEFAULT_REGION } from './helpers.ts' @@ -151,7 +150,10 @@ export type NotificationRecord = unknown // Poll for notifications, used in #listenBucketNotification. // Listening constitutes repeatedly requesting s3 whether or not any // changes have occurred. 
-export class NotificationPoller extends EventEmitter { +export class NotificationPoller extends EventEmitter<{ + notification: (event: NotificationRecord) => void + error: (error: unknown) => void +}> { private client: TypedBase private bucketName: string private prefix: string From ea7500ac0966bf007e9809984c38abca01480e85 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sat, 13 May 2023 08:31:50 +0800 Subject: [PATCH 69/78] fix --- src/minio.ts | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 src/minio.ts diff --git a/src/minio.ts b/src/minio.ts new file mode 100644 index 00000000..925996fc --- /dev/null +++ b/src/minio.ts @@ -0,0 +1,32 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +export { AssumeRoleProvider } from './AssumeRoleProvider.ts' +export { CopyConditions } from './copyConditions.ts' +export { CredentialProvider } from './CredentialProvider.ts' +export { Credentials } from './Credentials.ts' +export { CopyDestinationOptions, CopySourceOptions, DEFAULT_REGION, ENCRYPTION_TYPES } from './helpers.ts' +export type { NotificationEvent, NotificationRecord } from './notification.ts' +export { + buildARN, + CloudFunctionConfig, + NotificationConfig, + NotificationPoller, + QueueConfig, + TopicConfig, +} from './notification.ts' +export { PostPolicy } from './postPolicy.ts' +export { Client } from './typed-client2.ts' From 38adaf097c9b163fc0f281e19004d74da33bbcda Mon Sep 17 00:00:00 2001 From: Trim21 Date: Sat, 13 May 2023 08:34:39 +0800 Subject: [PATCH 70/78] remove qs --- src/qs.ts | 7 ------- src/typed-client.ts | 6 +++--- src/typedBase.ts | 6 +++--- 3 files changed, 6 insertions(+), 13 deletions(-) delete mode 100644 src/qs.ts diff --git a/src/qs.ts b/src/qs.ts deleted file mode 100644 index 56c17504..00000000 --- a/src/qs.ts +++ /dev/null @@ -1,7 +0,0 @@ -import queryString from 'query-string' - -// rfc 3986 encoding. 
-// `URLSearchParams` and `node:querystring` won't work -export function qs(q: Record): string { - return queryString.stringify(q) -} diff --git a/src/typed-client.ts b/src/typed-client.ts index 5d8ee292..03ea01c8 100644 --- a/src/typed-client.ts +++ b/src/typed-client.ts @@ -1,6 +1,7 @@ import * as fsp from 'node:fs/promises' import * as stream from 'node:stream' +import queryString from 'query-string' import { TextEncoder } from 'web-encoding' import xml2js from 'xml2js' @@ -52,7 +53,6 @@ import type { VersioningConfig, } from './internal/type.ts' import { PostPolicy } from './postPolicy.ts' -import { qs } from './qs.ts' import { postPresignSignatureV4, presignSignatureV4 } from './signing.ts' import * as transformers from './transformers.ts' import type { RequestMethod, RequestOption } from './typedBase.ts' @@ -737,7 +737,7 @@ export class TypedClient extends TypedBase { headers['x-minio-force-delete'] = true } - const query = qs(queryParams) + const query = queryString.stringify(queryParams) const requestOptions: RequestOption = { method, bucketName, objectName, headers } if (query) { @@ -829,7 +829,7 @@ export class TypedClient extends TypedBase { if (!isOptionalFunction(cb)) { throw new TypeError('callback should be of type "function"') } - const query = qs(reqParams) + const query = queryString.stringify(reqParams) return asCallbackFn(cb, async () => { const region = await this.getBucketRegionAsync(bucketName) diff --git a/src/typedBase.ts b/src/typedBase.ts index 1ae22743..1542d53f 100644 --- a/src/typedBase.ts +++ b/src/typedBase.ts @@ -12,6 +12,7 @@ import BlockStream2 from 'block-stream2' import { isBrowser } from 'browser-or-node' import _ from 'lodash' import * as querystring from 'query-string' +import queryString from 'query-string' import xml2js from 'xml2js' import { asCallback, asCallbackFn } from './as-callback.ts' @@ -71,7 +72,6 @@ import type { StatObjectOpts, UploadedObjectInfo, } from './internal/type.ts' -import { qs } from './qs.ts' import 
{ signV4 } from './signing.ts' import * as transformers from './transformers.ts' import type { Part } from './xml-parsers.ts' @@ -1223,7 +1223,7 @@ export class TypedBase { } const method = 'GET' - const query = qs(getOpts) + const query = queryString.stringify(getOpts) return asCallback( cb, this.makeRequestAsync({ method, bucketName, objectName, headers, query }, '', expectedStatusCodes), @@ -2043,7 +2043,7 @@ export async function uploadStream({ // now start to upload missing part const options: RequestOption = { method: 'PUT', - query: qs({ partNumber, uploadId }), + query: queryString.stringify({ partNumber, uploadId }), headers: { 'Content-Length': chunk.length, 'Content-MD5': md5.toString('base64'), From 09a74f2fe0ab2a23ed8878847b9b312c82c45c97 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 19 May 2023 06:14:25 +0800 Subject: [PATCH 71/78] tsc --- build.mjs | 2 +- src/CredentialProvider.ts | 5 +++ src/as-callback.ts | 2 +- src/assert.ts | 84 --------------------------------------- src/extensions.ts | 11 ++++- src/internal/helper.ts | 14 +++++++ src/postPolicy.ts | 3 +- src/streamify.ts | 30 -------------- src/transformers.ts | 2 +- src/typed-client.ts | 10 ++--- src/typed-client2.ts | 20 +++++----- src/typedBase.ts | 18 ++++----- 12 files changed, 55 insertions(+), 146 deletions(-) delete mode 100644 src/assert.ts delete mode 100644 src/streamify.ts diff --git a/build.mjs b/build.mjs index 0ab77f34..ff5e2979 100644 --- a/build.mjs +++ b/build.mjs @@ -1,7 +1,7 @@ /* eslint-disable no-console */ import { exec } from 'node:child_process' import * as fs from 'node:fs' -import * as fsp from 'node:fs/promises' +import { promises as fsp } from 'node:fs' import * as path from 'node:path' import { promisify } from 'node:util' diff --git a/src/CredentialProvider.ts b/src/CredentialProvider.ts index 98866ccb..ba224b33 100644 --- a/src/CredentialProvider.ts +++ b/src/CredentialProvider.ts @@ -47,3 +47,8 @@ export class CredentialProvider { return 
this.credentials.getSessionToken() } } + +// deprecated default export, please use named exports. +// keep for backward compatibility. +// eslint-disable-next-line import/no-default-export +export default CredentialProvider diff --git a/src/as-callback.ts b/src/as-callback.ts index c67b94a0..0382f8c4 100644 --- a/src/as-callback.ts +++ b/src/as-callback.ts @@ -1,4 +1,4 @@ -import { isFunction } from './assert.ts' +import { isFunction } from './internal/helper.ts' export function asCallback( cb: undefined | ((err: unknown | null, result: T) => void), diff --git a/src/assert.ts b/src/assert.ts deleted file mode 100644 index 51a34995..00000000 --- a/src/assert.ts +++ /dev/null @@ -1,84 +0,0 @@ -/** - * @internal - * - * assert js types - * - */ -import type * as stream from 'node:stream' - -import _ from 'lodash' - -/** - * check if typeof arg number - */ -export function isNumber(arg: unknown): arg is number { - return typeof arg === 'number' -} - -export type AnyFunction = (...args: any[]) => any - -/** - * check if typeof arg function - */ -export function isFunction(arg: unknown): arg is AnyFunction { - return typeof arg === 'function' -} - -/** - * check if typeof arg function or undefined - */ -export function isOptionalFunction(arg: unknown): arg is undefined | AnyFunction { - if (arg === undefined) { - return true - } - return typeof arg === 'function' -} - -/** - * check if typeof arg string - */ -export function isString(arg: unknown): arg is string { - return typeof arg === 'string' -} - -/** - * check if typeof arg object - */ -export function isObject(arg: unknown): arg is object { - return typeof arg === 'object' && arg !== null -} - -/** - * check if object is readable stream - */ -export function isReadableStream(arg: unknown): arg is stream.Readable { - // eslint-disable-next-line @typescript-eslint/unbound-method - return isObject(arg) && isFunction((arg as stream.Readable)._read) -} - -/** - * check if arg is boolean - */ -export function 
isBoolean(arg: unknown): arg is boolean { - return typeof arg === 'boolean' -} - -export function isEmpty(o: unknown): o is null | undefined { - return _.isEmpty(o) -} - -export function isEmptyObject(o: Record): boolean { - return Object.values(o).filter((x) => x !== undefined).length !== 0 -} - -/** - * check if arg is a valid date - */ -export function isValidDate(arg: unknown): arg is Date { - // @ts-expect-error TS(2345): Argument of type 'Date' is not assignable to param... Remove this comment to see the full error message - return arg instanceof Date && !isNaN(arg) -} - -export function isDefined(o: T): o is NonNullable { - return o !== null && o !== undefined -} diff --git a/src/extensions.ts b/src/extensions.ts index d30d051d..559d6daf 100644 --- a/src/extensions.ts +++ b/src/extensions.ts @@ -16,9 +16,16 @@ import * as stream from 'node:stream' -import { isBoolean, isNumber, isString } from './assert.ts' import * as errors from './errors.ts' -import { isValidBucketName, isValidPrefix, pipesetup, uriEscape } from './internal/helper.ts' +import { + isBoolean, + isNumber, + isString, + isValidBucketName, + isValidPrefix, + pipesetup, + uriEscape, +} from './internal/helper.ts' import * as transformers from './transformers.ts' import type { Client } from './typed-client2.ts' diff --git a/src/internal/helper.ts b/src/internal/helper.ts index 98c15888..6a86b08b 100644 --- a/src/internal/helper.ts +++ b/src/internal/helper.ts @@ -234,6 +234,16 @@ export function isFunction(arg: unknown): arg is AnyFunction { return typeof arg === 'function' } +/** + * check if typeof arg function or undefined + */ +export function isOptionalFunction(arg: unknown): arg is undefined | AnyFunction { + if (arg === undefined) { + return true + } + return typeof arg === 'function' +} + /** * check if typeof arg string */ @@ -263,6 +273,10 @@ export function isBoolean(arg: unknown): arg is boolean { return typeof arg === 'boolean' } +export function isDefined(o: T): o is NonNullable { 
+ return o !== null && o !== undefined +} + export function isEmpty(o: unknown): o is null | undefined { return _.isEmpty(o) } diff --git a/src/postPolicy.ts b/src/postPolicy.ts index d383b36c..d0156c91 100644 --- a/src/postPolicy.ts +++ b/src/postPolicy.ts @@ -1,7 +1,6 @@ // Build PostPolicy object that can be signed by presignedPostPolicy -import { isObject } from './assert.ts' import * as errors from './errors.ts' -import { isValidBucketName, isValidObjectName, isValidPrefix } from './internal/helper.ts' +import { isObject, isValidBucketName, isValidObjectName, isValidPrefix } from './internal/helper.ts' import type { ObjectMetaData } from './internal/type.ts' export class PostPolicy { diff --git a/src/streamify.ts b/src/streamify.ts deleted file mode 100644 index 02ab2d65..00000000 --- a/src/streamify.ts +++ /dev/null @@ -1,30 +0,0 @@ -import * as stream from 'node:stream' - -const Generator = async function* () {}.constructor - -export class StreamGenerators extends stream.Readable { - private _g: AsyncGenerator - - constructor(g: AsyncGeneratorFunction) { - if (!(g instanceof Generator)) { - throw new TypeError('First argument must be a ES6 Generator') - } - - super({ objectMode: true }) - this._g = g() - } - - async _read() { - try { - const { done, value } = await this._g.next() - - if (done) { - this.push(null) - } else { - this.push(value) - } - } catch (e) { - this.emit('error', e) - } - } -} diff --git a/src/transformers.ts b/src/transformers.ts index 9af86e81..4ba44460 100644 --- a/src/transformers.ts +++ b/src/transformers.ts @@ -20,8 +20,8 @@ import type * as stream from 'node:stream' import Through2 from 'through2' -import { isFunction } from './assert.ts' import * as errors from './errors.ts' +import { isFunction } from './internal/helper.ts' import * as xmlParsers from './xml-parsers.ts' // getConcater returns a stream that concatenates the input and emits diff --git a/src/typed-client.ts b/src/typed-client.ts index 03ea01c8..aa8939e2 100644 --- 
a/src/typed-client.ts +++ b/src/typed-client.ts @@ -1,4 +1,4 @@ -import * as fsp from 'node:fs/promises' +import { promises as fsp } from 'node:fs' import * as stream from 'node:stream' import queryString from 'query-string' @@ -6,6 +6,9 @@ import { TextEncoder } from 'web-encoding' import xml2js from 'xml2js' import { asCallback, asCallbackFn } from './as-callback.ts' +import * as errors from './errors.ts' +import type { SelectResults } from './helpers.ts' +import { LEGAL_HOLD_STATUS, RETENTION_MODES } from './helpers.ts' import { isBoolean, isEmpty, @@ -15,10 +18,7 @@ import { isOptionalFunction, isString, isValidDate, -} from './assert.ts' -import * as errors from './errors.ts' -import type { SelectResults } from './helpers.ts' -import { LEGAL_HOLD_STATUS, RETENTION_MODES } from './helpers.ts' +} from './internal/helper.ts' import { getScope, insertContentType, diff --git a/src/typed-client2.ts b/src/typed-client2.ts index 4e456916..4bf2a8bd 100644 --- a/src/typed-client2.ts +++ b/src/typed-client2.ts @@ -1,4 +1,4 @@ -import * as fsp from 'node:fs/promises' +import { promises as fsp } from 'node:fs' import * as stream from 'node:stream' import async from 'async' @@ -6,16 +6,6 @@ import _ from 'lodash' import xml2js from 'xml2js' import { asCallback, asCallbackFn } from './as-callback.ts' -import { - isBoolean, - isEmpty, - isFunction, - isNumber, - isObject, - isOptionalFunction, - isReadableStream, - isString, -} from './assert.ts' import { CopyConditions } from './copyConditions.ts' import * as errors from './errors.ts' import { CopyDestinationOptions, CopySourceOptions } from './helpers.ts' @@ -25,6 +15,14 @@ import { extractMetadata, getSourceVersionId, getVersionId, + isBoolean, + isEmpty, + isFunction, + isNumber, + isObject, + isOptionalFunction, + isReadableStream, + isString, isValidBucketName, isValidObjectName, isValidPrefix, diff --git a/src/typedBase.ts b/src/typedBase.ts index 1542d53f..77690534 100644 --- a/src/typedBase.ts +++ b/src/typedBase.ts 
@@ -1,6 +1,6 @@ import * as crypto from 'node:crypto' import * as fs from 'node:fs' -import * as fsp from 'node:fs/promises' +import { promises as fsp } from 'node:fs' import type { IncomingMessage } from 'node:http' import * as http from 'node:http' import * as https from 'node:https' @@ -16,7 +16,13 @@ import queryString from 'query-string' import xml2js from 'xml2js' import { asCallback, asCallbackFn } from './as-callback.ts' -import type { AnyFunction } from './assert.ts' +import { CredentialProvider } from './CredentialProvider.ts' +import * as errors from './errors.ts' +import { S3Error } from './errors.ts' +import { extensions } from './extensions.ts' +import { DEFAULT_REGION } from './helpers.ts' +import { streamPromise } from './internal/async.ts' +import type { AnyFunction } from './internal/helper.ts' import { isBoolean, isDefined, @@ -27,13 +33,7 @@ import { isOptionalFunction, isReadableStream, isString, -} from './assert.ts' -import { CredentialProvider } from './CredentialProvider.ts' -import * as errors from './errors.ts' -import { S3Error } from './errors.ts' -import { extensions } from './extensions.ts' -import { DEFAULT_REGION } from './helpers.ts' -import { streamPromise } from './internal/async.ts' +} from './internal/helper.ts' import { extractMetadata, getVersionId, From 08b999b6bcf5028a916b0912d946cf684a464913 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 19 May 2023 06:18:53 +0800 Subject: [PATCH 72/78] fix fs/promise in nodejs 12 --- build.mjs | 2 +- src/internal/async.ts | 2 ++ src/typed-client.ts | 2 +- src/typedBase.ts | 3 +-- 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/build.mjs b/build.mjs index ff5e2979..0ab77f34 100644 --- a/build.mjs +++ b/build.mjs @@ -1,7 +1,7 @@ /* eslint-disable no-console */ import { exec } from 'node:child_process' import * as fs from 'node:fs' -import { promises as fsp } from 'node:fs' +import * as fsp from 'node:fs/promises' import * as path from 'node:path' import { promisify } from 
'node:util' diff --git a/src/internal/async.ts b/src/internal/async.ts index 1f23f715..2532dd59 100644 --- a/src/internal/async.ts +++ b/src/internal/async.ts @@ -4,6 +4,8 @@ import * as fs from 'node:fs' import * as stream from 'node:stream' import { promisify } from 'node:util' +// TODO: use "node:fs/promise" directly after we stop testing on nodejs 12 +export { promises as fsp } from 'node:fs' export const streamPromise = { // node:stream/promises Added in: v15.0.0 pipeline: promisify(stream.pipeline), diff --git a/src/typed-client.ts b/src/typed-client.ts index aa8939e2..08f89fee 100644 --- a/src/typed-client.ts +++ b/src/typed-client.ts @@ -1,4 +1,3 @@ -import { promises as fsp } from 'node:fs' import * as stream from 'node:stream' import queryString from 'query-string' @@ -9,6 +8,7 @@ import { asCallback, asCallbackFn } from './as-callback.ts' import * as errors from './errors.ts' import type { SelectResults } from './helpers.ts' import { LEGAL_HOLD_STATUS, RETENTION_MODES } from './helpers.ts' +import { fsp } from './internal/async.ts' import { isBoolean, isEmpty, diff --git a/src/typedBase.ts b/src/typedBase.ts index 77690534..90567b83 100644 --- a/src/typedBase.ts +++ b/src/typedBase.ts @@ -1,6 +1,5 @@ import * as crypto from 'node:crypto' import * as fs from 'node:fs' -import { promises as fsp } from 'node:fs' import type { IncomingMessage } from 'node:http' import * as http from 'node:http' import * as https from 'node:https' @@ -21,7 +20,7 @@ import * as errors from './errors.ts' import { S3Error } from './errors.ts' import { extensions } from './extensions.ts' import { DEFAULT_REGION } from './helpers.ts' -import { streamPromise } from './internal/async.ts' +import { fsp, streamPromise } from './internal/async.ts' import type { AnyFunction } from './internal/helper.ts' import { isBoolean, From a99fc6eda93126a0674ece4e7a259172e5006bbd Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 19 May 2023 06:38:26 +0800 Subject: [PATCH 73/78] dupe code --- 
src/copyConditions.ts | 37 ----------- src/{ => internal}/as-callback.ts | 2 +- src/{ => internal}/extensions.ts | 18 ++---- src/minio.ts | 4 +- src/postPolicy.ts | 104 ------------------------------ src/typed-client.ts | 4 +- src/typed-client2.ts | 4 +- src/typedBase.ts | 10 +-- 8 files changed, 17 insertions(+), 166 deletions(-) delete mode 100644 src/copyConditions.ts rename src/{ => internal}/as-callback.ts (93%) rename src/{ => internal}/extensions.ts (95%) delete mode 100644 src/postPolicy.ts diff --git a/src/copyConditions.ts b/src/copyConditions.ts deleted file mode 100644 index 25d00331..00000000 --- a/src/copyConditions.ts +++ /dev/null @@ -1,37 +0,0 @@ -export class CopyConditions { - public modified: string - public unmodified: string - public matchETag: string - public matchETagExcept: string - - constructor() { - this.modified = '' - this.unmodified = '' - this.matchETag = '' - this.matchETagExcept = '' - } - - setModified(date: Date): void { - if (!(date instanceof Date)) { - throw new TypeError('date must be of type Date') - } - - this.modified = date.toUTCString() - } - - setUnmodified(date: Date): void { - if (!(date instanceof Date)) { - throw new TypeError('date must be of type Date') - } - - this.unmodified = date.toUTCString() - } - - setMatchETag(etag: string): void { - this.matchETag = etag - } - - setMatchETagExcept(etag: string): void { - this.matchETagExcept = etag - } -} diff --git a/src/as-callback.ts b/src/internal/as-callback.ts similarity index 93% rename from src/as-callback.ts rename to src/internal/as-callback.ts index 0382f8c4..e08337e1 100644 --- a/src/as-callback.ts +++ b/src/internal/as-callback.ts @@ -1,4 +1,4 @@ -import { isFunction } from './internal/helper.ts' +import { isFunction } from './helper.ts' export function asCallback( cb: undefined | ((err: unknown | null, result: T) => void), diff --git a/src/extensions.ts b/src/internal/extensions.ts similarity index 95% rename from src/extensions.ts rename to 
src/internal/extensions.ts index 559d6daf..2c00b526 100644 --- a/src/extensions.ts +++ b/src/internal/extensions.ts @@ -16,23 +16,15 @@ import * as stream from 'node:stream' -import * as errors from './errors.ts' -import { - isBoolean, - isNumber, - isString, - isValidBucketName, - isValidPrefix, - pipesetup, - uriEscape, -} from './internal/helper.ts' -import * as transformers from './transformers.ts' -import type { Client } from './typed-client2.ts' +import * as errors from '../errors.ts' +import * as transformers from '../transformers.ts' +import type { Client } from '../typed-client2.ts' +import { isBoolean, isNumber, isString, isValidBucketName, isValidPrefix, pipesetup, uriEscape } from './helper.ts' // TODO type S3Object = unknown -export class extensions { +export class Extensions { constructor(readonly client: Client) {} // List the objects in the bucket using S3 ListObjects V2 With Metadata diff --git a/src/minio.ts b/src/minio.ts index 925996fc..7471a125 100644 --- a/src/minio.ts +++ b/src/minio.ts @@ -15,10 +15,11 @@ */ export { AssumeRoleProvider } from './AssumeRoleProvider.ts' -export { CopyConditions } from './copyConditions.ts' export { CredentialProvider } from './CredentialProvider.ts' export { Credentials } from './Credentials.ts' export { CopyDestinationOptions, CopySourceOptions, DEFAULT_REGION, ENCRYPTION_TYPES } from './helpers.ts' +export { CopyConditions } from './internal/copy-conditions.ts' +export { PostPolicy } from './internal/post-policy.ts' export type { NotificationEvent, NotificationRecord } from './notification.ts' export { buildARN, @@ -28,5 +29,4 @@ export { QueueConfig, TopicConfig, } from './notification.ts' -export { PostPolicy } from './postPolicy.ts' export { Client } from './typed-client2.ts' diff --git a/src/postPolicy.ts b/src/postPolicy.ts deleted file mode 100644 index d0156c91..00000000 --- a/src/postPolicy.ts +++ /dev/null @@ -1,104 +0,0 @@ -// Build PostPolicy object that can be signed by presignedPostPolicy 
-import * as errors from './errors.ts' -import { isObject, isValidBucketName, isValidObjectName, isValidPrefix } from './internal/helper.ts' -import type { ObjectMetaData } from './internal/type.ts' - -export class PostPolicy { - public policy: { conditions: (string | number)[][]; expiration?: string } - public formData: Record - - constructor() { - this.policy = { - conditions: [], - } - this.formData = {} - } - - // set expiration date - setExpires(date: Date) { - if (!date) { - throw new errors.InvalidDateError('Invalid date: cannot be null') - } - this.policy.expiration = date.toISOString() - } - - // set object name - setKey(objectName: string) { - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name : ${objectName}`) - } - this.policy.conditions.push(['eq', '$key', objectName]) - this.formData.key = objectName - } - - // set object name prefix, i.e policy allows any keys with this prefix - setKeyStartsWith(prefix: string) { - if (!isValidPrefix(prefix)) { - throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) - } - this.policy.conditions.push(['starts-with', '$key', prefix]) - this.formData.key = prefix - } - - // set bucket name - setBucket(bucketName: string) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError(`Invalid bucket name : ${bucketName}`) - } - this.policy.conditions.push(['eq', '$bucket', bucketName]) - this.formData.bucket = bucketName - } - - // set Content-Type - setContentType(type: string) { - if (!type) { - throw new Error('content-type cannot be null') - } - this.policy.conditions.push(['eq', '$Content-Type', type]) - this.formData['Content-Type'] = type - } - - // set Content-Type prefix, i.e image/ allows any image - setContentTypeStartsWith(prefix: string) { - if (!prefix) { - throw new Error('content-type cannot be null') - } - this.policy.conditions.push(['starts-with', '$Content-Type', prefix]) - this.formData['Content-Type'] = prefix - } - - 
// set Content-Disposition - setContentDisposition(value: string) { - if (!value) { - throw new Error('content-disposition cannot be null') - } - this.policy.conditions.push(['eq', '$Content-Disposition', value]) - this.formData['Content-Disposition'] = value - } - - // set minimum/maximum length of what Content-Length can be. - setContentLengthRange(min: number, max: number) { - if (min > max) { - throw new Error('min cannot be more than max') - } - if (min < 0) { - throw new Error('min should be > 0') - } - if (max < 0) { - throw new Error('max should be > 0') - } - this.policy.conditions.push(['content-length-range', min, max]) - } - - // set user defined metadata - setUserMetaData(metaData: ObjectMetaData) { - if (!isObject(metaData)) { - throw new TypeError('metadata should be of type "object"') - } - Object.entries(metaData).forEach(([key, value]) => { - const amzMetaDataKey = `x-amz-meta-${key}` - this.policy.conditions.push(['eq', `$${amzMetaDataKey}`, value]) - this.formData[amzMetaDataKey] = value.toString() - }) - } -} diff --git a/src/typed-client.ts b/src/typed-client.ts index 08f89fee..16f2716d 100644 --- a/src/typed-client.ts +++ b/src/typed-client.ts @@ -4,10 +4,10 @@ import queryString from 'query-string' import { TextEncoder } from 'web-encoding' import xml2js from 'xml2js' -import { asCallback, asCallbackFn } from './as-callback.ts' import * as errors from './errors.ts' import type { SelectResults } from './helpers.ts' import { LEGAL_HOLD_STATUS, RETENTION_MODES } from './helpers.ts' +import { asCallback, asCallbackFn } from './internal/as-callback.ts' import { fsp } from './internal/async.ts' import { isBoolean, @@ -30,6 +30,7 @@ import { toMd5, uriEscape, } from './internal/helper.ts' +import { PostPolicy } from './internal/post-policy.ts' import { readAsBuffer } from './internal/response.ts' import type { BucketStream, @@ -52,7 +53,6 @@ import type { VersionIdentification, VersioningConfig, } from './internal/type.ts' -import { PostPolicy } 
from './postPolicy.ts' import { postPresignSignatureV4, presignSignatureV4 } from './signing.ts' import * as transformers from './transformers.ts' import type { RequestMethod, RequestOption } from './typedBase.ts' diff --git a/src/typed-client2.ts b/src/typed-client2.ts index 4bf2a8bd..4971f64c 100644 --- a/src/typed-client2.ts +++ b/src/typed-client2.ts @@ -5,11 +5,11 @@ import async from 'async' import _ from 'lodash' import xml2js from 'xml2js' -import { asCallback, asCallbackFn } from './as-callback.ts' -import { CopyConditions } from './copyConditions.ts' import * as errors from './errors.ts' import { CopyDestinationOptions, CopySourceOptions } from './helpers.ts' +import { asCallback, asCallbackFn } from './internal/as-callback.ts' import { fstat } from './internal/async.ts' +import { CopyConditions } from './internal/copy-conditions.ts' import { calculateEvenSplits, extractMetadata, diff --git a/src/typedBase.ts b/src/typedBase.ts index 90567b83..0809505d 100644 --- a/src/typedBase.ts +++ b/src/typedBase.ts @@ -14,13 +14,13 @@ import * as querystring from 'query-string' import queryString from 'query-string' import xml2js from 'xml2js' -import { asCallback, asCallbackFn } from './as-callback.ts' import { CredentialProvider } from './CredentialProvider.ts' import * as errors from './errors.ts' import { S3Error } from './errors.ts' -import { extensions } from './extensions.ts' import { DEFAULT_REGION } from './helpers.ts' +import { asCallback, asCallbackFn } from './internal/as-callback.ts' import { fsp, streamPromise } from './internal/async.ts' +import { Extensions } from './internal/extensions.ts' import type { AnyFunction } from './internal/helper.ts' import { isBoolean, @@ -162,9 +162,9 @@ export class TypedBase { protected s3AccelerateEndpoint?: string protected reqOptions: Record - private readonly clientExtensions: extensions + private readonly clientExtensions: Extensions private logStream?: stream.Writable - private transportAgent: http.Agent + 
private readonly transportAgent: http.Agent constructor(params: ClientOptions) { // @ts-expect-error deprecated property @@ -298,7 +298,7 @@ export class TypedBase { // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore - this.clientExtensions = new extensions(this) + this.clientExtensions = new Extensions(this) } /** From f659a06447a5929cc71eae0aa9095c3193bca7a9 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 19 May 2023 06:45:24 +0800 Subject: [PATCH 74/78] diff --- src/internal/type.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/internal/type.ts b/src/internal/type.ts index 65c1662d..520fcfc2 100644 --- a/src/internal/type.ts +++ b/src/internal/type.ts @@ -9,6 +9,8 @@ export type ResponseHeader = Record export type ObjectMetaData = Record +export type RequestHeaders = Record + export type Encryption = | { type: ENCRYPTION_TYPES.SSEC @@ -46,7 +48,6 @@ export enum LEGAL_HOLD_STATUS { } export type Transport = typeof http | typeof https -export type RequestHeaders = Record export interface UploadedObjectInfo { etag: string From 04607399e0851764ecc944fdf9533974732864c0 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Fri, 19 May 2023 06:48:20 +0800 Subject: [PATCH 75/78] diff --- src/internal/s3-endpoints.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/internal/s3-endpoints.ts b/src/internal/s3-endpoints.ts index b8e8f15f..141f08ff 100644 --- a/src/internal/s3-endpoints.ts +++ b/src/internal/s3-endpoints.ts @@ -35,7 +35,7 @@ const awsS3Endpoint = { 'ap-east-1': 's3.ap-east-1.amazonaws.com', 'eu-north-1': 's3.eu-north-1.amazonaws.com', // Add new endpoints here. 
-} as const +} export type Region = keyof typeof awsS3Endpoint | string @@ -49,6 +49,5 @@ export function getS3Endpoint(region: Region): string { if (endpoint) { return endpoint } - return 's3.amazonaws.com' } From 9e5f09a01c32b02279b6cdb9ea3f69624fe4621c Mon Sep 17 00:00:00 2001 From: Trim21 Date: Mon, 22 May 2023 04:38:59 +0800 Subject: [PATCH 76/78] merge --- src/internal/type.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/internal/type.ts b/src/internal/type.ts index 520fcfc2..feacce3b 100644 --- a/src/internal/type.ts +++ b/src/internal/type.ts @@ -1,5 +1,4 @@ import type * as http from 'node:http' -import type * as https from 'node:https' import type { Readable as ReadableStream } from 'node:stream' export type Binary = string | Buffer @@ -47,7 +46,7 @@ export enum LEGAL_HOLD_STATUS { DISABLED = 'OFF', } -export type Transport = typeof http | typeof https +export type Transport = Pick export interface UploadedObjectInfo { etag: string From 2781c16b5a2f98afa25926b8d3ac603215805f6e Mon Sep 17 00:00:00 2001 From: Trim21 Date: Mon, 22 May 2023 04:42:39 +0800 Subject: [PATCH 77/78] diff --- src/internal/helper.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/internal/helper.ts b/src/internal/helper.ts index 6a86b08b..c91474db 100644 --- a/src/internal/helper.ts +++ b/src/internal/helper.ts @@ -273,10 +273,6 @@ export function isBoolean(arg: unknown): arg is boolean { return typeof arg === 'boolean' } -export function isDefined(o: T): o is NonNullable { - return o !== null && o !== undefined -} - export function isEmpty(o: unknown): o is null | undefined { return _.isEmpty(o) } @@ -285,6 +281,10 @@ export function isEmptyObject(o: Record): boolean { return Object.values(o).filter((x) => x !== undefined).length !== 0 } +export function isDefined(o: T): o is NonNullable { + return o !== null && o !== undefined +} + /** * check if arg is a valid date */ From 99386cd4f3ba02212f9bc97c26695f6e58056322 Mon Sep 17 
00:00:00 2001 From: Trim21 Date: Mon, 6 Nov 2023 01:20:33 +0800 Subject: [PATCH 78/78] fix --- src/internal/helper.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/internal/helper.ts b/src/internal/helper.ts index c1677ba9..3bc0c096 100644 --- a/src/internal/helper.ts +++ b/src/internal/helper.ts @@ -281,7 +281,7 @@ export function isEmptyObject(o: Record): boolean { return Object.values(o).filter((x) => x !== undefined).length !== 0 } -export function isDefined(o: T): o is NonNullable { +export function isDefined(o: T): o is Exclude { return o !== null && o !== undefined }