diff --git a/package-lock.json b/package-lock.json index 58437108..d23677a1 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12,13 +12,13 @@ "async": "^3.2.4", "block-stream2": "^2.1.0", "browser-or-node": "^2.1.1", - "buffer-crc32": "^0.2.13", + "crc-32": "^1.2.2", "fast-xml-parser": "^4.2.2", "ipaddr.js": "^2.0.1", - "json-stream": "^1.0.0", "lodash": "^4.17.21", "mime-types": "^2.1.35", - "query-string": "^7.1.3", + "query-string": "^7.1.1", + "stream-json": "^1.7.5", "through2": "^4.0.2", "web-encoding": "^1.1.5", "xml": "^1.0.1", @@ -32,9 +32,11 @@ "@babel/register": "^7.21.0", "@nodelib/fs.walk": "^1.2.8", "@types/async": "^3.2.20", + "@types/block-stream2": "^2.1.0", "@types/lodash": "^4.14.194", "@types/mime-types": "^2.1.1", "@types/node": "^20.1.0", + "@types/stream-json": "^1.7.3", "@types/through2": "^2.0.38", "@types/xml": "^1.0.8", "@types/xml2js": "^0.4.11", @@ -52,6 +54,7 @@ "eslint-plugin-simple-import-sort": "^10.0.0", "eslint-plugin-unicorn": "^47.0.0", "eslint-plugin-unused-imports": "^2.0.0", + "eventemitter3": "^5.0.1", "husky": "^8.0.3", "lint-staged": "^13.2.2", "mocha": "^10.2.0", @@ -2227,6 +2230,15 @@ "integrity": "sha512-6jSBQQugzyX1aWto0CbvOnmxrU9tMoXfA9gc4IrLEtvr3dTwSg5GLGoWiZnGLI6UG/kqpB3JOQKQrqnhUWGKQA==", "dev": true }, + "node_modules/@types/block-stream2": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/block-stream2/-/block-stream2-2.1.0.tgz", + "integrity": "sha512-ue1bw4ZKeWIudQfKFvKAudFwpZ1Co1DzUCFxeJWnYGnpiGGZ9SU4gNb9NCSVctZ64W/L4SSVYM77nuhZZ8V0Ew==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/json-schema": { "version": "7.0.11", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", @@ -2269,6 +2281,25 @@ "integrity": "sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==", "dev": true }, + "node_modules/@types/stream-chain": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@types/stream-chain/-/stream-chain-2.0.1.tgz", + "integrity": "sha512-D+Id9XpcBpampptkegH7WMsEk6fUdf9LlCIX7UhLydILsqDin4L0QT7ryJR0oycwC7OqohIzdfcMHVZ34ezNGg==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/stream-json": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/@types/stream-json/-/stream-json-1.7.3.tgz", + "integrity": "sha512-Jqsyq5VPOTWorvEmzWhEWH5tJnHA+bB8vt/Zzb11vSDj8esfSHDMj2rbVjP0mfJQzl3YBJSXBBq08iiyaBK3KA==", + "dev": true, + "dependencies": { + "@types/node": "*", + "@types/stream-chain": "*" + } + }, "node_modules/@types/through2": { "version": "2.0.38", "resolved": "https://registry.npmjs.org/@types/through2/-/through2-2.0.38.tgz", @@ -2996,15 +3027,6 @@ "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" } }, - "node_modules/buffer-crc32": { - "version": "0.2.13", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", - "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", - "license": "MIT", - "engines": { - "node": "*" - } - }, "node_modules/buffer-from": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", @@ -3380,6 +3402,17 @@ "url": "https://opencollective.com/core-js" } }, + "node_modules/crc-32": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", + "integrity": 
"sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==", + "bin": { + "crc32": "bin/crc32.njs" + }, + "engines": { + "node": ">=0.8" + } + }, "node_modules/cross-spawn": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", @@ -4183,6 +4216,12 @@ "node": ">=0.10.0" } }, + "node_modules/eventemitter3": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", + "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", + "dev": true + }, "node_modules/execa": { "version": "7.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-7.1.1.tgz", @@ -4430,20 +4469,6 @@ "dev": true, "license": "ISC" }, - "node_modules/fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", - "dev": true, - "hasInstallScript": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, "node_modules/function-bind": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", @@ -5354,12 +5379,6 @@ "dev": true, "license": "MIT" }, - "node_modules/json-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-stream/-/json-stream-1.0.0.tgz", - "integrity": "sha512-H/ZGY0nIAg3QcOwE1QN/rK/Fa7gJn7Ii5obwp6zyPO4xiPNwpIMjqy2gwjBEGqzkF/vSWEIBQCBuN19hYiL6Qg==", - "license": "MIT" - }, "node_modules/json-stringify-safe": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", @@ -7154,6 +7173,19 @@ "node": ">=6" } }, + "node_modules/stream-chain": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/stream-chain/-/stream-chain-2.2.5.tgz", + "integrity": "sha512-1TJmBx6aSWqZ4tx7aTpBDXK0/e2hhcNSTV8+CbFJtDjbb+I1mZ8lHit0Grw9GRT+6JbIrrDd8esncgBi8aBXGA==" + }, + "node_modules/stream-json": { + "version": "1.7.5", + "resolved": "https://registry.npmjs.org/stream-json/-/stream-json-1.7.5.tgz", + "integrity": "sha512-NSkoVduGakxZ8a+pTPUlcGEeAGQpWL9rKJhOFCV+J/QtdQUEU5vtBgVg6eJXn8JB8RZvpbJWZGvXkhz70MLWoA==", + "dependencies": { + "stream-chain": "^2.2.5" + } + }, "node_modules/strict-uri-encode": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz", diff --git a/package.json b/package.json index 8e73cf98..a7e5f58a 100644 --- a/package.json +++ b/package.json @@ -85,13 +85,13 @@ "async": "^3.2.4", "block-stream2": "^2.1.0", "browser-or-node": "^2.1.1", - "buffer-crc32": "^0.2.13", + "crc-32": "^1.2.2", "fast-xml-parser": "^4.2.2", "ipaddr.js": "^2.0.1", - "json-stream": "^1.0.0", "lodash": "^4.17.21", "mime-types": "^2.1.35", - "query-string": "^7.1.3", + "query-string": "^7.1.1", + "stream-json": "^1.7.5", "through2": "^4.0.2", "web-encoding": "^1.1.5", "xml": "^1.0.1", @@ -105,9 +105,11 @@ "@babel/register": "^7.21.0", "@nodelib/fs.walk": "^1.2.8", "@types/async": "^3.2.20", + "@types/block-stream2": "^2.1.0", "@types/lodash": "^4.14.194", "@types/mime-types": "^2.1.1", "@types/node": "^20.1.0", + "@types/stream-json": "^1.7.3", "@types/through2": "^2.0.38", "@types/xml": "^1.0.8", "@types/xml2js": "^0.4.11", @@ -125,6 +127,7 @@ "eslint-plugin-simple-import-sort": "^10.0.0", "eslint-plugin-unicorn": "^47.0.0", 
"eslint-plugin-unused-imports": "^2.0.0", + "eventemitter3": "^5.0.1", "husky": "^8.0.3", "lint-staged": "^13.2.2", "mocha": "^10.2.0", diff --git a/src/errors.ts b/src/errors.ts index 8285c668..9aca3e35 100644 --- a/src/errors.ts +++ b/src/errors.ts @@ -114,6 +114,7 @@ export class InvalidXMLError extends ExtendableError {} */ export class S3Error extends ExtendableError { code?: string + region?: string } export class IsValidBucketNameError extends ExtendableError {} diff --git a/src/internal/as-callback.ts b/src/internal/as-callback.ts new file mode 100644 index 00000000..e08337e1 --- /dev/null +++ b/src/internal/as-callback.ts @@ -0,0 +1,32 @@ +import { isFunction } from './helper.ts' + +export function asCallback( + cb: undefined | ((err: unknown | null, result: T) => void), + promise: Promise, +): Promise | void { + if (cb === undefined) { + return promise + } + + if (!isFunction(cb)) { + throw new TypeError(`callback should be of type "function", got ${cb}`) + } + + promise.then( + (result) => { + cb(null, result) + }, + (err) => { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + cb(err) + }, + ) +} + +export function asCallbackFn( + cb: undefined | ((err: unknown | null, result: T) => void), + asyncFn: () => Promise, +): Promise | void { + return asCallback(cb, asyncFn()) +} diff --git a/src/internal/async.ts b/src/internal/async.ts new file mode 100644 index 00000000..2532dd59 --- /dev/null +++ b/src/internal/async.ts @@ -0,0 +1,14 @@ +// promise helper for stdlib + +import * as fs from 'node:fs' +import * as stream from 'node:stream' +import { promisify } from 'node:util' + +// TODO: use "node:fs/promise" directly after we stop testing on nodejs 12 +export { promises as fsp } from 'node:fs' +export const streamPromise = { + // node:stream/promises Added in: v15.0.0 + pipeline: promisify(stream.pipeline), +} + +export const fstat = promisify(fs.fstat) diff --git a/src/internal/helper.ts b/src/internal/helper.ts index 1660bbe2..3bc0c096 100644 --- a/src/internal/helper.ts +++ b/src/internal/helper.ts @@ -234,6 +234,16 @@ export function isFunction(arg: unknown): arg is AnyFunction { return typeof arg === 'function' } +/** + * check if typeof arg function or undefined + */ +export function isOptionalFunction(arg: unknown): arg is undefined | AnyFunction { + if (arg === undefined) { + return true + } + return typeof arg === 'function' +} + /** * check if typeof arg string */ diff --git a/src/internal/type.ts b/src/internal/type.ts index 55b3cfb9..15a8fd58 100644 --- a/src/internal/type.ts +++ b/src/internal/type.ts @@ -14,6 +14,41 @@ export type ObjectMetaData = Record export type RequestHeaders = Record +export interface LifecycleConfig { + Rule: LifecycleRule[] +} + +export interface LifecycleRule { + [key: string]: any +} + +export type UploadID = string +export type NoResultCallback = (error: unknown | null) => void +export type TagList = Record +export type VersionIdentification = { versionId?: string } +export type Lifecycle = LifecycleConfig | null | '' +export type Lock = LockConfig | EmptyObject +export type Retention = RetentionOptions | EmptyObject +export type IsoDate = string + +export type GetObjectOpt = { + versionId?: string +} + +export interface RetentionOptions { + versionId: string + mode?: RETENTION_MODES + retainUntilDate?: IsoDate + governanceBypass?: boolean +} + +export interface LockConfig { + objectLockEnabled?: 'Enabled' + mode: LEGAL_HOLD_STATUS + unit: RETENTION_VALIDITY_UNITS + validity: number +} + export type 
diff --git a/src/internal/type.ts b/src/internal/type.ts
index 55b3cfb9..15a8fd58 100644
--- a/src/internal/type.ts
+++ b/src/internal/type.ts
@@ -14,6 +14,41 @@ export type ObjectMetaData = Record<string, string | number>
 
 export type RequestHeaders = Record<string, string | boolean | number | undefined>
 
+export interface LifecycleConfig {
+  Rule: LifecycleRule[]
+}
+
+export interface LifecycleRule {
+  [key: string]: any
+}
+
+export type UploadID = string
+export type NoResultCallback = (error: unknown | null) => void
+export type TagList = Record<string, string>
+export type VersionIdentification = { versionId?: string }
+export type Lifecycle = LifecycleConfig | null | ''
+export type Lock = LockConfig | EmptyObject
+export type Retention = RetentionOptions | EmptyObject
+export type IsoDate = string
+
+export type GetObjectOpt = {
+  versionId?: string
+}
+
+export interface RetentionOptions {
+  versionId: string
+  mode?: RETENTION_MODES
+  retainUntilDate?: IsoDate
+  governanceBypass?: boolean
+}
+
+export interface LockConfig {
+  objectLockEnabled?: 'Enabled'
+  mode: LEGAL_HOLD_STATUS
+  unit: RETENTION_VALIDITY_UNITS
+  validity: number
+}
+
 export type Encryption =
   | {
       type: ENCRYPTION_TYPES.SSEC
@@ -25,6 +60,7 @@
     }
 
 export type EnabledOrDisabledStatus = 'Enabled' | 'Disabled'
+
 export enum ENCRYPTION_TYPES {
   /**
    * SSEC represents server-side-encryption with customer provided keys
@@ -53,6 +89,11 @@ export enum LEGAL_HOLD_STATUS {
 
 export type Transport = Pick<typeof http, 'request'>
 
+export interface UploadedObjectInfo {
+  etag: string
+  versionId: string | null
+}
+
 export interface IRequest {
   protocol: string
   port?: number | string
@@ -124,6 +165,13 @@ export interface BucketStream<T> extends ReadableStream {
   on(event: string | symbol, listener: (...args: any[]) => void): this
 }
 
+export interface PostPolicyResult {
+  postURL: string
+  formData: {
+    [key: string]: any
+  }
+}
+
 export interface BucketItemStat {
   size: number
   etag: string
@@ -146,6 +194,19 @@ export type Tag = {
   Value: string
 }
 
+export interface EncryptionConfig {
+  Rule?: EncryptionRule[]
+}
+
+export interface EncryptionRule {
+  [key: string]: any
+}
+
+export interface LegalHoldOptions {
+  versionId?: string
+  status: LEGAL_HOLD_STATUS
+}
+
 export type ReplicationRuleDestination = {
   Bucket: string
   StorageClass: string
@@ -173,6 +234,45 @@ export type ExistingObjectReplication = {
   Status: ReplicationRuleStatus
 }
 
+export interface InputSerialization {
+  CompressionType?: 'NONE' | 'GZIP' | 'BZIP2'
+  CSV?: {
+    AllowQuotedRecordDelimiter?: boolean
+    Comments?: string
+    FieldDelimiter?: string
+    FileHeaderInfo?: 'NONE' | 'IGNORE' | 'USE'
+    QuoteCharacter?: string
+    QuoteEscapeCharacter?: string
+    RecordDelimiter?: string
+  }
+  JSON?: {
+    Type: 'DOCUMENT' | 'LINES'
+  }
+  Parquet?: EmptyObject
+}
+
+export interface OutputSerialization {
+  CSV?: {
+    FieldDelimiter?: string
+    QuoteCharacter?: string
+    QuoteEscapeCharacter?: string
+    QuoteFields?: string
+    RecordDelimiter?: string
+  }
+  JSON?: {
+    RecordDelimiter?: string
+  }
+}
+
+export interface SelectOptions {
+  expression: string
+  expressionType?: string
+  inputSerialization: InputSerialization
+  outputSerialization: OutputSerialization
+  requestProgress?: { Enabled: boolean }
+  scanRange?: { Start: number; End: number }
+}
+
 export type ReplicationRule = {
   ID: string
   Status: ReplicationRuleStatus
@@ -195,7 +295,7 @@
 }
 
 /* Replication Config types */
-export type ResultCallback<T> = (error: Error | null, result: T) => void
+export type ResultCallback<T> = (error: unknown | null, result: T) => void
 
 export type GetObjectLegalHoldOptions = {
   versionId: string
@@ -235,3 +335,42 @@
       }
     | EmptyObject
 }
+
+export interface SourceObjectStats {
+  size: number
+  metaData: string
+  lastModified: Date
+  versionId: string
+  etag: string
+}
+
+export interface MakeBucketOpt {
+  ObjectLocking?: boolean
+}
+
+export interface RemoveOptions {
+  versionId?: string
+  forceDelete?: boolean
+  governanceBypass?: boolean
+}
+
+export interface BucketItemFromList {
+  name: string
+  // date when bucket was created
+  creationDate: Date
+}
+
+export type VersioningConfig = Record<string | number | symbol, unknown>
+
+export interface VersionConfigInput {
+  Status?: string
+  MfaDelete?: string
+
+  [key: string]: any
+}
+
+export type ListObjectV1Opt = {
+  Delimiter?: string
+  MaxKeys?: number
+  IncludeVersion?: boolean
+}
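For reference, here is what a value of the SelectOptions shape added above could look like; the SQL expression and the CSV/JSON settings are illustrative only, not taken from this diff:

import type { SelectOptions } from './internal/type.ts'

const selectOpts: SelectOptions = {
  expression: 'SELECT s.Name FROM S3Object s', // hypothetical query
  expressionType: 'SQL',
  inputSerialization: {
    CompressionType: 'NONE',
    CSV: { FileHeaderInfo: 'USE', FieldDelimiter: ',', RecordDelimiter: '\n' },
  },
  outputSerialization: {
    JSON: { RecordDelimiter: '\n' },
  },
  requestProgress: { Enabled: false },
}
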
diff --git a/src/minio.d.ts b/src/minio.d.ts
deleted file mode 100644
index 6d5c4ab7..00000000
--- a/src/minio.d.ts
+++ /dev/null
@@ -1,568 +0,0 @@
-// imported from https://github.com/DefinitelyTyped/DefinitelyTyped/blob/93cfb0ec069731dcdfc31464788613f7cddb8192/types/minio/index.d.ts
-/* eslint-disable @typescript-eslint/no-explicit-any */
-
-import { EventEmitter } from 'node:events'
-import type { Readable as ReadableStream } from 'node:stream'
-
-import type {
-  CopyDestinationOptions,
-  CopySourceOptions,
-  LEGAL_HOLD_STATUS,
-  RETENTION_MODES,
-  RETENTION_VALIDITY_UNITS,
-} from './helpers.ts'
-import type { ClientOptions, NoResultCallback, RemoveOptions } from './internal/client.ts'
-import { TypedClient } from './internal/client.ts'
-import { CopyConditions } from './internal/copy-conditions.ts'
-import { PostPolicy } from './internal/post-policy.ts'
-import type { Region } from './internal/s3-endpoints.ts'
-import type {
-  BucketItem,
-  BucketItemCopy,
-  BucketItemFromList,
-  BucketItemStat,
-  BucketItemWithMetadata,
-  BucketStream,
-  ExistingObjectReplication,
-  GetObjectLegalHoldOptions,
-  IncompleteUploadedBucketItem,
-  ItemBucketMetadata,
-  ItemBucketMetadataList,
-  MetadataItem,
-  ObjectLockInfo,
-  PutObjectLegalHoldOptions,
-  ReplicaModifications,
-  ReplicationConfig,
-  ReplicationConfigOpts,
-  ReplicationRule,
-  ReplicationRuleAnd,
-  ReplicationRuleDestination,
-  ReplicationRuleFilter,
-  ReplicationRuleStatus,
-  ResultCallback,
-  SourceSelectionCriteria,
-  Tag,
-} from './internal/type.ts'
-
-export * from './helpers.ts'
-export type { Region } from './internal/s3-endpoints.ts'
-export { CopyConditions, PostPolicy }
-export type {
-  BucketItem,
-  BucketItemCopy,
-  BucketItemFromList,
-  BucketItemStat,
-  BucketItemWithMetadata,
-  BucketStream,
-  ClientOptions,
-  ExistingObjectReplication,
-  GetObjectLegalHoldOptions,
-  IncompleteUploadedBucketItem,
-  ItemBucketMetadata,
-  ItemBucketMetadataList,
-  MetadataItem,
-  NoResultCallback,
-  ObjectLockInfo,
-  PutObjectLegalHoldOptions,
-  RemoveOptions,
-  ReplicaModifications,
-  ReplicationConfig,
-  ReplicationConfigOpts,
-  ReplicationRule,
-  ReplicationRuleAnd,
-  ReplicationRuleDestination,
-  ReplicationRuleFilter,
-  ReplicationRuleStatus,
-  SourceSelectionCriteria,
-  Tag,
-}
-
-// Exports only from typings
-export type NotificationEvent =
-  | 's3:ObjectCreated:*'
-  | 's3:ObjectCreated:Put'
-  | 's3:ObjectCreated:Post'
-  | 's3:ObjectCreated:Copy'
-  | 's3:ObjectCreated:CompleteMultipartUpload'
-  | 's3:ObjectRemoved:*'
-  | 's3:ObjectRemoved:Delete'
-  | 's3:ObjectRemoved:DeleteMarkerCreated'
-  | 's3:ReducedRedundancyLostObject'
-  | 's3:TestEvent'
-  | 's3:ObjectRestore:Post'
-  | 's3:ObjectRestore:Completed'
-  | 's3:Replication:OperationFailedReplication'
-  | 's3:Replication:OperationMissedThreshold'
-  | 's3:Replication:OperationReplicatedAfterThreshold'
-  | 's3:Replication:OperationNotTracked'
-  | string
-
-/**
- * @deprecated keep for backward compatible, use `RETENTION_MODES` instead
- */
-export type Mode = RETENTION_MODES
-
-/**
- * @deprecated keep for backward compatible
- */
-export type LockUnit = RETENTION_VALIDITY_UNITS
-
-/**
- * @deprecated keep for backward compatible
- */
-export type LegalHoldStatus = LEGAL_HOLD_STATUS
-export type VersioningConfig = Record<string | number | symbol, any>
-export type TagList = Record<string, string>
-export type EmptyObject = Record<string, unknown>
-export type VersionIdentificator = Pick<RetentionOptions, 'versionId'>
-export type Lifecycle = LifecycleConfig | null | ''
-export type Encryption = EncryptionConfig | EmptyObject
-export type Retention = RetentionOptions | EmptyObject
-export type IsoDate = string
-
-export interface PostPolicyResult {
-  postURL: string
-  formData: {
-    [key: string]: any
-  }
-}
-
-export interface UploadedObjectInfo {
-  etag: string
-  versionId: string | null
-}
-
-export interface LifecycleConfig {
-  Rule: LifecycleRule[]
-}
-
-export interface LifecycleRule {
-  [key: string]: any
-}
-
-export interface LockConfig {
-  mode: RETENTION_MODES
-  unit: RETENTION_VALIDITY_UNITS
-  validity: number
-}
-
-export interface EncryptionConfig {
-  Rule: EncryptionRule[]
-}
-
-export interface EncryptionRule {
-  [key: string]: any
-}
-
-export interface RetentionOptions {
-  versionId: string
-  mode?: RETENTION_MODES
-  retainUntilDate?: IsoDate
-  governanceBypass?: boolean
-}
-
-export interface LegalHoldOptions {
-  versionId: string
-  status: LEGAL_HOLD_STATUS
-}
-
-export interface InputSerialization {
-  CompressionType?: 'NONE' | 'GZIP' | 'BZIP2'
-  CSV?: {
-    AllowQuotedRecordDelimiter?: boolean
-    Comments?: string
-    FieldDelimiter?: string
-    FileHeaderInfo?: 'NONE' | 'IGNORE' | 'USE'
-    QuoteCharacter?: string
-    QuoteEscapeCharacter?: string
-    RecordDelimiter?: string
-  }
-  JSON?: {
-    Type: 'DOCUMENT' | 'LINES'
-  }
-  Parquet?: EmptyObject
-}
-
-export interface OutputSerialization {
-  CSV?: {
-    FieldDelimiter?: string
-    QuoteCharacter?: string
-    QuoteEscapeCharacter?: string
-    QuoteFields?: string
-    RecordDelimiter?: string
-  }
-  JSON?: {
-    RecordDelimiter?: string
-  }
-}
-
-export interface SelectOptions {
-  expression: string
-  expressionType?: string
-  inputSerialization: InputSerialization
-  outputSerialization: OutputSerialization
-  requestProgress?: { Enabled: boolean }
-  scanRange?: { Start: number; End: number }
-}
-
-export interface SourceObjectStats {
-  size: number
-  metaData: string
-  lastModicied: Date
-  versionId: string
-  etag: string
-}
-
-// No need to export this. But without it - linter error.
-export class TargetConfig {
-  setId(id: unknown): void
-
-  addEvent(newEvent: unknown): void
-
-  addFilterSuffix(suffix: string): void
-
-  addFilterPrefix(prefix: string): void
-}
-
-export interface MakeBucketOpt {
-  ObjectLocking: boolean
-}
-
-// Exports from library
-export class Client extends TypedClient {
-  // Bucket operations
-  makeBucket(bucketName: string, region: Region, makeOpts: MakeBucketOpt, callback: NoResultCallback): void
-  makeBucket(bucketName: string, region: Region, callback: NoResultCallback): void
-  makeBucket(bucketName: string, callback: NoResultCallback): void
-  makeBucket(bucketName: string, region?: Region, makeOpts?: MakeBucketOpt): Promise<void>
-
-  bucketExists(bucketName: string, callback: ResultCallback<boolean>): void
-  bucketExists(bucketName: string): Promise<boolean>
-
-  listObjects(bucketName: string, prefix?: string, recursive?: boolean): BucketStream<BucketItem>
-
-  listObjectsV2(bucketName: string, prefix?: string, recursive?: boolean, startAfter?: string): BucketStream<BucketItem>
-
-  listIncompleteUploads(
-    bucketName: string,
-    prefix?: string,
-    recursive?: boolean,
-  ): BucketStream<IncompleteUploadedBucketItem>
-
-  getBucketVersioning(bucketName: string, callback: ResultCallback<VersioningConfig>): void
-  getBucketVersioning(bucketName: string): Promise<VersioningConfig>
-
-  setBucketVersioning(bucketName: string, versioningConfig: any, callback: NoResultCallback): void
-  setBucketVersioning(bucketName: string, versioningConfig: any): Promise<void>
-
-  setBucketTagging(bucketName: string, tags: TagList, callback: NoResultCallback): void
-  setBucketTagging(bucketName: string, tags: TagList): Promise<void>
-
-  removeBucketTagging(bucketName: string, callback: NoResultCallback): void
-  removeBucketTagging(bucketName: string): Promise<void>
-
-  setBucketLifecycle(bucketName: string, lifecycleConfig: Lifecycle, callback: NoResultCallback): void
-  setBucketLifecycle(bucketName: string, lifecycleConfig: Lifecycle): Promise<void>
-
-  getBucketLifecycle(bucketName: string, callback: ResultCallback<Lifecycle>): void
-  getBucketLifecycle(bucketName: string): Promise<Lifecycle>
-
-  removeBucketLifecycle(bucketName: string, callback: NoResultCallback): void
-  removeBucketLifecycle(bucketName: string): Promise<void>
-
-  getBucketEncryption(bucketName: string, callback: ResultCallback<Encryption>): void
-  getBucketEncryption(bucketName: string): Promise<Encryption>
-
-  setBucketEncryption(bucketName: string, encryptionConfig: Encryption, callback: NoResultCallback): void
-  setBucketEncryption(bucketName: string, encryptionConfig: Encryption): Promise<void>
-
-  removeBucketEncryption(bucketName: string, callback: NoResultCallback): void
-  removeBucketEncryption(bucketName: string): Promise<void>
-
-  // Object operations
-  getObject(bucketName: string, objectName: string, callback: ResultCallback<ReadableStream>): void
-  getObject(bucketName: string, objectName: string): Promise<ReadableStream>
-
-  getPartialObject(
-    bucketName: string,
-    objectName: string,
-    offset: number,
-    callback: ResultCallback<ReadableStream>,
-  ): void
-  getPartialObject(
-    bucketName: string,
-    objectName: string,
-    offset: number,
-    length: number,
-    callback: ResultCallback<ReadableStream>,
-  ): void
-  getPartialObject(bucketName: string, objectName: string, offset: number, length?: number): Promise<ReadableStream>
-
-  fGetObject(bucketName: string, objectName: string, filePath: string, callback: NoResultCallback): void
-  fGetObject(bucketName: string, objectName: string, filePath: string): Promise<void>
-
-  putObject(
-    bucketName: string,
-    objectName: string,
-    stream: ReadableStream | Buffer | string,
-    callback: ResultCallback<UploadedObjectInfo>,
-  ): void
-  putObject(
-    bucketName: string,
-    objectName: string,
-    stream: ReadableStream | Buffer | string,
-    size: number,
-    callback: ResultCallback<UploadedObjectInfo>,
-  ): void
-  putObject(
-    bucketName: string,
-    objectName: string,
-    stream: ReadableStream | Buffer | string,
-    size: number,
-    metaData: ItemBucketMetadata,
-    callback: ResultCallback<UploadedObjectInfo>,
-  ): void
-  putObject(
-    bucketName: string,
-    objectName: string,
-    stream: ReadableStream | Buffer | string,
-    size?: number,
-    metaData?: ItemBucketMetadata,
-  ): Promise<UploadedObjectInfo>
-  putObject(
-    bucketName: string,
-    objectName: string,
-    stream: ReadableStream | Buffer | string,
-    metaData?: ItemBucketMetadata,
-  ): Promise<UploadedObjectInfo>
-
-  fPutObject(
-    bucketName: string,
-    objectName: string,
-    filePath: string,
-    metaData: ItemBucketMetadata,
-    callback: ResultCallback<UploadedObjectInfo>,
-  ): void
-  fPutObject(
-    bucketName: string,
-    objectName: string,
-    filePath: string,
-    metaData?: ItemBucketMetadata,
-  ): Promise<UploadedObjectInfo>
-
-  copyObject(
-    bucketName: string,
-    objectName: string,
-    sourceObject: string,
-    conditions: CopyConditions,
-    callback: ResultCallback<BucketItemCopy>,
-  ): void
-  copyObject(
-    bucketName: string,
-    objectName: string,
-    sourceObject: string,
-    conditions: CopyConditions,
-  ): Promise<BucketItemCopy>
-
-  removeObjects(bucketName: string, objectsList: string[], callback: NoResultCallback): void
-  removeObjects(bucketName: string, objectsList: string[]): Promise<void>
-
-  removeIncompleteUpload(bucketName: string, objectName: string, callback: NoResultCallback): void
-  removeIncompleteUpload(bucketName: string, objectName: string): Promise<void>
-
-  putObjectRetention(bucketName: string, objectName: string, callback: NoResultCallback): void
-  putObjectRetention(
-    bucketName: string,
-    objectName: string,
-    retentionOptions: Retention,
-    callback: NoResultCallback,
-  ): void
-  putObjectRetention(bucketName: string, objectName: string, retentionOptions?: Retention): Promise<void>
-
-  getObjectRetention(
-    bucketName: string,
-    objectName: string,
-    options: VersionIdentificator,
-    callback: ResultCallback<Retention>,
-  ): void
-  getObjectRetention(bucketName: string, objectName: string, options: VersionIdentificator): Promise<Retention>
-
-  setObjectTagging(bucketName: string, objectName: string, tags: TagList, callback: NoResultCallback): void
-  setObjectTagging(
-    bucketName: string,
-    objectName: string,
-    tags: TagList,
-    putOptions: VersionIdentificator,
-    callback: NoResultCallback,
-  ): void
-  setObjectTagging(
-    bucketName: string,
-    objectName: string,
-    tags: TagList,
-    putOptions?: VersionIdentificator,
-  ): Promise<void>
-
-  removeObjectTagging(bucketName: string, objectName: string, callback: NoResultCallback): void
-  removeObjectTagging(
-    bucketName: string,
-    objectName: string,
-    removeOptions: VersionIdentificator,
-    callback: NoResultCallback,
-  ): void
-  removeObjectTagging(bucketName: string, objectName: string, removeOptions?: VersionIdentificator): Promise<void>
-
-  composeObject(
-    destObjConfig: CopyDestinationOptions,
-    sourceObjList: CopySourceOptions[],
-    callback: ResultCallback<SourceObjectStats>,
-  ): void
-  composeObject(destObjConfig: CopyDestinationOptions, sourceObjList: CopySourceOptions[]): Promise<SourceObjectStats>
-
-  selectObjectContent(
-    bucketName: string,
-    objectName: string,
-    selectOpts: SelectOptions,
-    callback: NoResultCallback,
-  ): void
-  selectObjectContent(bucketName: string, objectName: string, selectOpts: SelectOptions): Promise<void>
-
-  // Presigned operations
-  presignedUrl(httpMethod: string, bucketName: string, objectName: string, callback: ResultCallback<string>): void
-  presignedUrl(
-    httpMethod: string,
-    bucketName: string,
-    objectName: string,
-    expiry: number,
-    callback: ResultCallback<string>,
-  ): void
-  presignedUrl(
-    httpMethod: string,
-    bucketName: string,
-    objectName: string,
-    expiry: number,
-    reqParams: { [key: string]: any },
-    callback: ResultCallback<string>,
-  ): void
-  presignedUrl(
-    httpMethod: string,
-    bucketName: string,
-    objectName: string,
-    expiry: number,
-    reqParams: { [key: string]: any },
-    requestDate: Date,
-    callback: ResultCallback<string>,
-  ): void
-  presignedUrl(
-    httpMethod: string,
-    bucketName: string,
-    objectName: string,
-    expiry?: number,
-    reqParams?: { [key: string]: any },
-    requestDate?: Date,
-  ): Promise<string>
-
-  presignedGetObject(bucketName: string, objectName: string, callback: ResultCallback<string>): void
-  presignedGetObject(bucketName: string, objectName: string, expiry: number, callback: ResultCallback<string>): void
-  presignedGetObject(
-    bucketName: string,
-    objectName: string,
-    expiry: number,
-    respHeaders: { [key: string]: any },
-    callback: ResultCallback<string>,
-  ): void
-  presignedGetObject(
-    bucketName: string,
-    objectName: string,
-    expiry: number,
-    respHeaders: { [key: string]: any },
-    requestDate: Date,
-    callback: ResultCallback<string>,
-  ): void
-  presignedGetObject(
-    bucketName: string,
-    objectName: string,
-    expiry?: number,
-    respHeaders?: { [key: string]: any },
-    requestDate?: Date,
-  ): Promise<string>
-
-  presignedPutObject(bucketName: string, objectName: string, callback: ResultCallback<string>): void
-  presignedPutObject(bucketName: string, objectName: string, expiry: number, callback: ResultCallback<string>): void
-  presignedPutObject(bucketName: string, objectName: string, expiry?: number): Promise<string>
-
-  presignedPostPolicy(policy: PostPolicy, callback: ResultCallback<PostPolicyResult>): void
-  presignedPostPolicy(policy: PostPolicy): Promise<PostPolicyResult>
-
-  // Bucket Policy & Notification operations
-  getBucketNotification(bucketName: string, callback: ResultCallback<NotificationConfig>): void
-  getBucketNotification(bucketName: string): Promise<NotificationConfig>
-
-  setBucketNotification(
-    bucketName: string,
-    bucketNotificationConfig: NotificationConfig,
-    callback: NoResultCallback,
-  ): void
-  setBucketNotification(bucketName: string, bucketNotificationConfig: NotificationConfig): Promise<void>
-
-  removeAllBucketNotification(bucketName: string, callback: NoResultCallback): void
-  removeAllBucketNotification(bucketName: string): Promise<void>
-
-  getBucketPolicy(bucketName: string, callback: ResultCallback<string>): void
-  getBucketPolicy(bucketName: string): Promise<string>
-
-  setBucketPolicy(bucketName: string, bucketPolicy: string, callback: NoResultCallback): void
-  setBucketPolicy(bucketName: string, bucketPolicy: string): Promise<void>
-
-  listenBucketNotification(
-    bucketName: string,
-    prefix: string,
-    suffix: string,
-    events: NotificationEvent[],
-  ): NotificationPoller
-
-  // Other
-  newPostPolicy(): PostPolicy
-}
-
-export declare class NotificationPoller extends EventEmitter {
-  stop(): void
-
-  start(): void
-
-  // must to be public?
-  checkForChanges(): void
-}
-
-export declare class NotificationConfig {
-  add(target: TopicConfig | QueueConfig | CloudFunctionConfig): void
-}
-
-export declare class TopicConfig extends TargetConfig {
-  constructor(arn: string)
-}
-
-export declare class QueueConfig extends TargetConfig {
-  constructor(arn: string)
-}
-
-export declare class CloudFunctionConfig extends TargetConfig {
-  constructor(arn: string)
-}
-
-export declare function buildARN(
-  partition: string,
-  service: string,
-  region: string,
-  accountId: string,
-  resource: string,
-): string
-
-export declare const ObjectCreatedAll: NotificationEvent // s3:ObjectCreated:*'
-export declare const ObjectCreatedPut: NotificationEvent // s3:ObjectCreated:Put
-export declare const ObjectCreatedPost: NotificationEvent // s3:ObjectCreated:Post
-export declare const ObjectCreatedCopy: NotificationEvent // s3:ObjectCreated:Copy
-export declare const ObjectCreatedCompleteMultipartUpload: NotificationEvent // s3:ObjectCreated:CompleteMultipartUpload
-export declare const ObjectRemovedAll: NotificationEvent // s3:ObjectRemoved:*
-export declare const ObjectRemovedDelete: NotificationEvent // s3:ObjectRemoved:Delete
-export declare const ObjectRemovedDeleteMarkerCreated: NotificationEvent // s3:ObjectRemoved:DeleteMarkerCreated
-export declare const ObjectReducedRedundancyLostObject: NotificationEvent // s3:ReducedRedundancyLostObject
diff --git a/src/minio.js b/src/minio.js
deleted file mode 100644
index 98d2651c..00000000
--- a/src/minio.js
+++ /dev/null
@@ -1,2434 +0,0 @@
-/*
- * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */ - -import * as fs from 'node:fs' -import * as path from 'node:path' -import * as Stream from 'node:stream' - -import async from 'async' -import BlockStream2 from 'block-stream2' -import _ from 'lodash' -import * as querystring from 'query-string' -import { TextEncoder } from 'web-encoding' -import Xml from 'xml' -import xml2js from 'xml2js' - -import * as errors from './errors.ts' -import { CopyDestinationOptions, CopySourceOptions, DEFAULT_REGION } from './helpers.ts' -import { callbackify } from './internal/callbackify.js' -import { TypedClient } from './internal/client.ts' -import { CopyConditions } from './internal/copy-conditions.ts' -import { - calculateEvenSplits, - extractMetadata, - getScope, - getSourceVersionId, - getVersionId, - insertContentType, - isBoolean, - isFunction, - isNumber, - isObject, - isReadableStream, - isString, - isValidBucketName, - isValidDate, - isValidObjectName, - isValidPrefix, - makeDateLong, - PART_CONSTRAINTS, - partsRequired, - pipesetup, - prependXAMZMeta, - readableStream, - sanitizeETag, - toMd5, - uriEscape, - uriResourceEscape, -} from './internal/helper.ts' -import { PostPolicy } from './internal/post-policy.ts' -import { RETENTION_MODES } from './internal/type.ts' -import { NotificationConfig, NotificationPoller } from './notification.js' -import { ObjectUploader } from './object-uploader.js' -import { promisify } from './promisify.js' -import { postPresignSignatureV4, presignSignatureV4 } from './signing.ts' -import * as transformers from './transformers.js' -import { parseSelectObjectContentResponse } from './xml-parsers.js' - -export * from './helpers.ts' -export * from './notification.js' -export { CopyConditions, PostPolicy } - -export class Client extends TypedClient { - // Set application specific information. - // - // Generates User-Agent in the following style. - // - // MinIO (OS; ARCH) LIB/VER APP/VER - // - // __Arguments__ - // * `appName` _string_ - Application name. - // * `appVersion` _string_ - Application version. - setAppInfo(appName, appVersion) { - if (!isString(appName)) { - throw new TypeError(`Invalid appName: ${appName}`) - } - if (appName.trim() === '') { - throw new errors.InvalidArgumentError('Input appName cannot be empty.') - } - if (!isString(appVersion)) { - throw new TypeError(`Invalid appVersion: ${appVersion}`) - } - if (appVersion.trim() === '') { - throw new errors.InvalidArgumentError('Input appVersion cannot be empty.') - } - this.userAgent = `${this.userAgent} ${appName}/${appVersion}` - } - - // Calculate part size given the object size. Part size will be atleast this.partSize - calculatePartSize(size) { - if (!isNumber(size)) { - throw new TypeError('size should be of type "number"') - } - if (size > this.maxObjectSize) { - throw new TypeError(`size should not be more than ${this.maxObjectSize}`) - } - if (this.overRidePartSize) { - return this.partSize - } - var partSize = this.partSize - for (;;) { - // while(true) {...} throws linting error. - // If partSize is big enough to accomodate the object size, then use it. - if (partSize * 10000 > size) { - return partSize - } - // Try part sizes as 64MB, 80MB, 96MB etc. - partSize += 16 * 1024 * 1024 - } - } - - // Creates the bucket `bucketName`. - // - // __Arguments__ - // * `bucketName` _string_ - Name of the bucket - // * `region` _string_ - region valid values are _us-west-1_, _us-west-2_, _eu-west-1_, _eu-central-1_, _ap-southeast-1_, _ap-northeast-1_, _ap-southeast-2_, _sa-east-1_. - // * `makeOpts` _object_ - Options to create a bucket. 
e.g {ObjectLocking:true} (Optional) - // * `callback(err)` _function_ - callback function with `err` as the error argument. `err` is null if the bucket is successfully created. - makeBucket(bucketName, region, makeOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - // Backward Compatibility - if (isObject(region)) { - cb = makeOpts - makeOpts = region - region = '' - } - if (isFunction(region)) { - cb = region - region = '' - makeOpts = {} - } - if (isFunction(makeOpts)) { - cb = makeOpts - makeOpts = {} - } - - if (!isString(region)) { - throw new TypeError('region should be of type "string"') - } - if (!isObject(makeOpts)) { - throw new TypeError('makeOpts should be of type "object"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - var payload = '' - - // Region already set in constructor, validate if - // caller requested bucket location is same. - if (region && this.region) { - if (region !== this.region) { - throw new errors.InvalidArgumentError(`Configured region ${this.region}, requested ${region}`) - } - } - // sending makeBucket request with XML containing 'us-east-1' fails. For - // default region server expects the request without body - if (region && region !== DEFAULT_REGION) { - var createBucketConfiguration = [] - createBucketConfiguration.push({ - _attr: { - xmlns: 'http://s3.amazonaws.com/doc/2006-03-01/', - }, - }) - createBucketConfiguration.push({ - LocationConstraint: region, - }) - var payloadObject = { - CreateBucketConfiguration: createBucketConfiguration, - } - payload = Xml(payloadObject) - } - var method = 'PUT' - var headers = {} - - if (makeOpts.ObjectLocking) { - headers['x-amz-bucket-object-lock-enabled'] = true - } - - if (!region) { - region = DEFAULT_REGION - } - - const processWithRetry = (err) => { - if (err && (region === '' || region === DEFAULT_REGION)) { - if (err.code === 'AuthorizationHeaderMalformed' && err.region !== '') { - // Retry with region returned as part of error - this.makeRequest({ method, bucketName, headers }, payload, [200], err.region, false, cb) - } else { - return cb && cb(err) - } - } - return cb && cb(err) - } - this.makeRequest({ method, bucketName, headers }, payload, [200], region, false, processWithRetry) - } - - // Returns a stream that emits objects that are partially uploaded. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `prefix` _string_: prefix of the object names that are partially uploaded (optional, default `''`) - // * `recursive` _bool_: directory style listing when false, recursive listing when true (optional, default `false`) - // - // __Return Value__ - // * `stream` _Stream_ : emits objects of the format: - // * `object.key` _string_: name of the object - // * `object.uploadId` _string_: upload ID of the object - // * `object.size` _Integer_: size of the partially uploaded object - listIncompleteUploads(bucket, prefix, recursive) { - if (prefix === undefined) { - prefix = '' - } - if (recursive === undefined) { - recursive = false - } - if (!isValidBucketName(bucket)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucket) - } - if (!isValidPrefix(prefix)) { - throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) - } - if (!isBoolean(recursive)) { - throw new TypeError('recursive should be of type "boolean"') - } - var delimiter = recursive ? 
'' : '/' - var keyMarker = '' - var uploadIdMarker = '' - var uploads = [] - var ended = false - var readStream = Stream.Readable({ objectMode: true }) - readStream._read = () => { - // push one upload info per _read() - if (uploads.length) { - return readStream.push(uploads.shift()) - } - if (ended) { - return readStream.push(null) - } - this.listIncompleteUploadsQuery(bucket, prefix, keyMarker, uploadIdMarker, delimiter) - .on('error', (e) => readStream.emit('error', e)) - .on('data', (result) => { - result.prefixes.forEach((prefix) => uploads.push(prefix)) - async.eachSeries( - result.uploads, - (upload, cb) => { - // for each incomplete upload add the sizes of its uploaded parts - this.listParts(bucket, upload.key, upload.uploadId).then((parts) => { - upload.size = parts.reduce((acc, item) => acc + item.size, 0) - uploads.push(upload) - cb() - }, cb) - }, - (err) => { - if (err) { - readStream.emit('error', err) - return - } - if (result.isTruncated) { - keyMarker = result.nextKeyMarker - uploadIdMarker = result.nextUploadIdMarker - } else { - ended = true - } - readStream._read() - }, - ) - }) - } - return readStream - } - - // To check if a bucket already exists. - // - // __Arguments__ - // * `bucketName` _string_ : name of the bucket - // * `callback(err)` _function_ : `err` is `null` if the bucket exists - bucketExists(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - var method = 'HEAD' - this.makeRequest({ method, bucketName }, '', [200], '', false, (err) => { - if (err) { - if (err.code == 'NoSuchBucket' || err.code == 'NotFound') { - return cb(null, false) - } - return cb(err) - } - cb(null, true) - }) - } - - // Remove the partially uploaded object. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `callback(err)` _function_: callback function is called with non `null` value in case of error - removeIncompleteUpload(bucketName, objectName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.IsValidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - var removeUploadId - async.during( - (cb) => { - this.findUploadId(bucketName, objectName, (e, uploadId) => { - if (e) { - return cb(e) - } - removeUploadId = uploadId - cb(null, uploadId) - }) - }, - (cb) => { - var method = 'DELETE' - var query = `uploadId=${removeUploadId}` - this.makeRequest({ method, bucketName, objectName, query }, '', [204], '', false, (e) => cb(e)) - }, - cb, - ) - } - - // Callback is called with `error` in case of error or `null` in case of success - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `filePath` _string_: path to which the object data will be written to - // * `getOpts` _object_: Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional) - // * `callback(err)` _function_: callback is called with `err` in case of error. - fGetObject(bucketName, objectName, filePath, getOpts = {}, cb) { - // Input validation. 
- if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isString(filePath)) { - throw new TypeError('filePath should be of type "string"') - } - // Backward Compatibility - if (isFunction(getOpts)) { - cb = getOpts - getOpts = {} - } - - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - // Internal data. - var partFile - var partFileStream - var objStat - - // Rename wrapper. - var rename = (err) => { - if (err) { - return cb(err) - } - fs.rename(partFile, filePath, cb) - } - - async.waterfall( - [ - (cb) => this.statObject(bucketName, objectName, getOpts, cb), - (result, cb) => { - objStat = result - // Create any missing top level directories. - fs.mkdir(path.dirname(filePath), { recursive: true }, (err) => cb(err)) - }, - (cb) => { - partFile = `${filePath}.${objStat.etag}.part.minio` - fs.stat(partFile, (e, stats) => { - var offset = 0 - if (e) { - partFileStream = fs.createWriteStream(partFile, { flags: 'w' }) - } else { - if (objStat.size === stats.size) { - return rename() - } - offset = stats.size - partFileStream = fs.createWriteStream(partFile, { flags: 'a' }) - } - this.getPartialObject(bucketName, objectName, offset, 0, getOpts, cb) - }) - }, - (downloadStream, cb) => { - pipesetup(downloadStream, partFileStream) - .on('error', (e) => cb(e)) - .on('finish', cb) - }, - (cb) => fs.stat(partFile, cb), - (stats, cb) => { - if (stats.size === objStat.size) { - return cb() - } - cb(new Error('Size mismatch between downloaded file and the object')) - }, - ], - rename, - ) - } - - // Callback is called with readable stream of the object content. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `getOpts` _object_: Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional) - // * `callback(err, stream)` _function_: callback is called with `err` in case of error. `stream` is the object content stream - getObject(bucketName, objectName, getOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - // Backward Compatibility - if (isFunction(getOpts)) { - cb = getOpts - getOpts = {} - } - - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - this.getPartialObject(bucketName, objectName, 0, 0, getOpts, cb) - } - - // Callback is called with readable stream of the partial object content. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `offset` _number_: offset of the object from where the stream will start - // * `length` _number_: length of the object that will be read in the stream (optional, if not specified we read the rest of the file from the offset) - // * `getOpts` _object_: Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional) - // * `callback(err, stream)` _function_: callback is called with `err` in case of error. 
`stream` is the object content stream - getPartialObject(bucketName, objectName, offset, length, getOpts = {}, cb) { - if (isFunction(length)) { - cb = length - length = 0 - } - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isNumber(offset)) { - throw new TypeError('offset should be of type "number"') - } - if (!isNumber(length)) { - throw new TypeError('length should be of type "number"') - } - // Backward Compatibility - if (isFunction(getOpts)) { - cb = getOpts - getOpts = {} - } - - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - var range = '' - if (offset || length) { - if (offset) { - range = `bytes=${+offset}-` - } else { - range = 'bytes=0-' - offset = 0 - } - if (length) { - range += `${+length + offset - 1}` - } - } - - var headers = {} - if (range !== '') { - headers.range = range - } - - var expectedStatusCodes = [200] - if (range) { - expectedStatusCodes.push(206) - } - var method = 'GET' - - var query = querystring.stringify(getOpts) - this.makeRequest({ method, bucketName, objectName, headers, query }, '', expectedStatusCodes, '', true, cb) - } - - // Uploads the object using contents from a file - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `filePath` _string_: file path of the file to be uploaded - // * `metaData` _Javascript Object_: metaData assosciated with the object - // * `callback(err, objInfo)` _function_: non null `err` indicates error, `objInfo` _object_ which contains versionId and etag. - fPutObject(bucketName, objectName, filePath, metaData, callback) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - - if (!isString(filePath)) { - throw new TypeError('filePath should be of type "string"') - } - if (isFunction(metaData)) { - callback = metaData - metaData = {} // Set metaData empty if no metaData provided. - } - if (!isObject(metaData)) { - throw new TypeError('metaData should be of type "object"') - } - - // Inserts correct `content-type` attribute based on metaData and filePath - metaData = insertContentType(metaData, filePath) - - fs.lstat(filePath, (err, stat) => { - if (err) { - return callback(err) - } - return this.putObject(bucketName, objectName, fs.createReadStream(filePath), stat.size, metaData, callback) - }) - } - - // Uploads the object. - // - // Uploading a stream - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `stream` _Stream_: Readable stream - // * `size` _number_: size of the object (optional) - // * `callback(err, etag)` _function_: non null `err` indicates error, `etag` _string_ is the etag of the object uploaded. 
- // - // Uploading "Buffer" or "string" - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `string or Buffer` _string_ or _Buffer_: string or buffer - // * `callback(err, objInfo)` _function_: `err` is `null` in case of success and `info` will have the following object details: - // * `etag` _string_: etag of the object - // * `versionId` _string_: versionId of the object - putObject(bucketName, objectName, stream, size, metaData, callback) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - - // We'll need to shift arguments to the left because of size and metaData. - if (isFunction(size)) { - callback = size - metaData = {} - } else if (isFunction(metaData)) { - callback = metaData - metaData = {} - } - - // We'll need to shift arguments to the left because of metaData - // and size being optional. - if (isObject(size)) { - metaData = size - } - - // Ensures Metadata has appropriate prefix for A3 API - metaData = prependXAMZMeta(metaData) - if (typeof stream === 'string' || stream instanceof Buffer) { - // Adapts the non-stream interface into a stream. - size = stream.length - stream = readableStream(stream) - } else if (!isReadableStream(stream)) { - throw new TypeError('third argument should be of type "stream.Readable" or "Buffer" or "string"') - } - - if (!isFunction(callback)) { - throw new TypeError('callback should be of type "function"') - } - - if (isNumber(size) && size < 0) { - throw new errors.InvalidArgumentError(`size cannot be negative, given size: ${size}`) - } - - // Get the part size and forward that to the BlockStream. Default to the - // largest block size possible if necessary. - if (!isNumber(size)) { - size = this.maxObjectSize - } - - size = this.calculatePartSize(size) - - // s3 requires that all non-end chunks be at least `this.partSize`, - // so we chunk the stream until we hit either that size or the end before - // we flush it to s3. - let chunker = new BlockStream2({ size, zeroPadding: false }) - - // This is a Writable stream that can be written to in order to upload - // to the specified bucket and object automatically. - let uploader = new ObjectUploader(this, bucketName, objectName, size, metaData, callback) - // stream => chunker => uploader - pipesetup(stream, chunker, uploader) - } - - // Copy the object. 
- // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `srcObject` _string_: path of the source object to be copied - // * `conditions` _CopyConditions_: copy conditions that needs to be satisfied (optional, default `null`) - // * `callback(err, {etag, lastModified})` _function_: non null `err` indicates error, `etag` _string_ and `listModifed` _Date_ are respectively the etag and the last modified date of the newly copied object - copyObjectV1(arg1, arg2, arg3, arg4, arg5) { - var bucketName = arg1 - var objectName = arg2 - var srcObject = arg3 - var conditions, cb - if (typeof arg4 == 'function' && arg5 === undefined) { - conditions = null - cb = arg4 - } else { - conditions = arg4 - cb = arg5 - } - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isString(srcObject)) { - throw new TypeError('srcObject should be of type "string"') - } - if (srcObject === '') { - throw new errors.InvalidPrefixError(`Empty source prefix`) - } - - if (conditions !== null && !(conditions instanceof CopyConditions)) { - throw new TypeError('conditions should be of type "CopyConditions"') - } - - var headers = {} - headers['x-amz-copy-source'] = uriResourceEscape(srcObject) - - if (conditions !== null) { - if (conditions.modified !== '') { - headers['x-amz-copy-source-if-modified-since'] = conditions.modified - } - if (conditions.unmodified !== '') { - headers['x-amz-copy-source-if-unmodified-since'] = conditions.unmodified - } - if (conditions.matchETag !== '') { - headers['x-amz-copy-source-if-match'] = conditions.matchETag - } - if (conditions.matchEtagExcept !== '') { - headers['x-amz-copy-source-if-none-match'] = conditions.matchETagExcept - } - } - - var method = 'PUT' - this.makeRequest({ method, bucketName, objectName, headers }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - var transformer = transformers.getCopyObjectTransformer() - pipesetup(response, transformer) - .on('error', (e) => cb(e)) - .on('data', (data) => cb(null, data)) - }) - } - - /** - * Internal Method to perform copy of an object. - * @param sourceConfig __object__ instance of CopySourceOptions @link ./helpers/CopySourceOptions - * @param destConfig __object__ instance of CopyDestinationOptions @link ./helpers/CopyDestinationOptions - * @param cb __function__ called with null if there is an error - * @returns Promise if no callack is passed. 
- */ - copyObjectV2(sourceConfig, destConfig, cb) { - if (!(sourceConfig instanceof CopySourceOptions)) { - throw new errors.InvalidArgumentError('sourceConfig should of type CopySourceOptions ') - } - if (!(destConfig instanceof CopyDestinationOptions)) { - throw new errors.InvalidArgumentError('destConfig should of type CopyDestinationOptions ') - } - if (!destConfig.validate()) { - return false - } - if (!destConfig.validate()) { - return false - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - const headers = Object.assign({}, sourceConfig.getHeaders(), destConfig.getHeaders()) - - const bucketName = destConfig.Bucket - const objectName = destConfig.Object - - const method = 'PUT' - this.makeRequest({ method, bucketName, objectName, headers }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - const transformer = transformers.getCopyObjectTransformer() - pipesetup(response, transformer) - .on('error', (e) => cb(e)) - .on('data', (data) => { - const resHeaders = response.headers - - const copyObjResponse = { - Bucket: destConfig.Bucket, - Key: destConfig.Object, - LastModified: data.LastModified, - MetaData: extractMetadata(resHeaders), - VersionId: getVersionId(resHeaders), - SourceVersionId: getSourceVersionId(resHeaders), - Etag: sanitizeETag(resHeaders.etag), - Size: +resHeaders['content-length'], - } - - return cb(null, copyObjResponse) - }) - }) - } - - // Backward compatibility for Copy Object API. - copyObject(...allArgs) { - if (allArgs[0] instanceof CopySourceOptions && allArgs[1] instanceof CopyDestinationOptions) { - return this.copyObjectV2(...arguments) - } - return this.copyObjectV1(...arguments) - } - - // list a batch of objects - listObjectsQuery(bucketName, prefix, marker, listQueryOpts = {}) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isString(prefix)) { - throw new TypeError('prefix should be of type "string"') - } - if (!isString(marker)) { - throw new TypeError('marker should be of type "string"') - } - let { Delimiter, MaxKeys, IncludeVersion } = listQueryOpts - - if (!isObject(listQueryOpts)) { - throw new TypeError('listQueryOpts should be of type "object"') - } - - if (!isString(Delimiter)) { - throw new TypeError('Delimiter should be of type "string"') - } - if (!isNumber(MaxKeys)) { - throw new TypeError('MaxKeys should be of type "number"') - } - - const queries = [] - // escape every value in query string, except maxKeys - queries.push(`prefix=${uriEscape(prefix)}`) - queries.push(`delimiter=${uriEscape(Delimiter)}`) - queries.push(`encoding-type=url`) - - if (IncludeVersion) { - queries.push(`versions`) - } - - if (marker) { - marker = uriEscape(marker) - if (IncludeVersion) { - queries.push(`key-marker=${marker}`) - } else { - queries.push(`marker=${marker}`) - } - } - - // no need to escape maxKeys - if (MaxKeys) { - if (MaxKeys >= 1000) { - MaxKeys = 1000 - } - queries.push(`max-keys=${MaxKeys}`) - } - queries.sort() - var query = '' - if (queries.length > 0) { - query = `${queries.join('&')}` - } - - var method = 'GET' - var transformer = transformers.getListObjectsTransformer() - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return transformer.emit('error', e) - } - pipesetup(response, transformer) - }) - return transformer - } - - // List the objects in the bucket. 
-  // List the objects in the bucket.
-  //
-  // __Arguments__
-  // * `bucketName` _string_: name of the bucket
-  // * `prefix` _string_: the prefix of the objects that should be listed (optional, default `''`)
-  // * `recursive` _bool_: `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. (optional, default `false`)
-  // * `listOpts` _object_: query params to list objects, with the below keys
-  // *   listOpts.MaxKeys _int_ maximum number of keys to return
-  // *   listOpts.IncludeVersion _bool_ true|false to include versions.
-  // __Return Value__
-  // * `stream` _Stream_: stream emitting the objects in the bucket, the object is of the format:
-  // * `obj.name` _string_: name of the object
-  // * `obj.prefix` _string_: name of the object prefix
-  // * `obj.size` _number_: size of the object
-  // * `obj.etag` _string_: etag of the object
-  // * `obj.lastModified` _Date_: modified time stamp
-  // * `obj.isDeleteMarker` _boolean_: true if it is a delete marker
-  // * `obj.versionId` _string_: versionId of the object
-  listObjects(bucketName, prefix, recursive, listOpts = {}) {
-    if (prefix === undefined) {
-      prefix = ''
-    }
-    if (recursive === undefined) {
-      recursive = false
-    }
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isValidPrefix(prefix)) {
-      throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`)
-    }
-    if (!isString(prefix)) {
-      throw new TypeError('prefix should be of type "string"')
-    }
-    if (!isBoolean(recursive)) {
-      throw new TypeError('recursive should be of type "boolean"')
-    }
-    if (!isObject(listOpts)) {
-      throw new TypeError('listOpts should be of type "object"')
-    }
-    var marker = ''
-    const listQueryOpts = {
-      Delimiter: recursive ? '' : '/', // if recursive is false set delimiter to '/'
-      MaxKeys: 1000,
-      IncludeVersion: listOpts.IncludeVersion,
-    }
-    var objects = []
-    var ended = false
-    var readStream = Stream.Readable({ objectMode: true })
-    readStream._read = () => {
-      // push one object per _read()
-      if (objects.length) {
-        readStream.push(objects.shift())
-        return
-      }
-      if (ended) {
-        return readStream.push(null)
-      }
-      // if there are no objects to push do query for the next batch of objects
-      this.listObjectsQuery(bucketName, prefix, marker, listQueryOpts)
-        .on('error', (e) => readStream.emit('error', e))
-        .on('data', (result) => {
-          if (result.isTruncated) {
-            marker = result.nextMarker || result.versionIdMarker
-          } else {
-            ended = true
-          }
-          objects = result.objects
-          readStream._read()
-        })
-    }
-    return readStream
-  }
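A quick sketch of consuming the stream returned by listObjects (illustrative, not part of the patch; assumes a configured `client` and an existing bucket):

    const stream = client.listObjects('my-bucket', 'photos/', true, { IncludeVersion: false })
    stream.on('data', (obj) => console.log(obj.name, obj.size, obj.etag))
    stream.on('error', (err) => console.log(err))
    stream.on('end', () => console.log('listing complete'))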
-  // listObjectsV2Query - (List Objects V2) - List some or all (up to 1000) of the objects in a bucket.
-  //
-  // You can use the request parameters as selection criteria to return a subset of the objects in a bucket.
-  // request parameters:
-  // * `bucketName` _string_: name of the bucket
-  // * `prefix` _string_: Limits the response to keys that begin with the specified prefix.
-  // * `continuation-token` _string_: Used to continue iterating over a set of objects.
-  // * `delimiter` _string_: A delimiter is a character you use to group keys.
-  // * `max-keys` _number_: Sets the maximum number of keys returned in the response body.
-  // * `start-after` _string_: Specifies the key to start after when listing objects in a bucket.
-  listObjectsV2Query(bucketName, prefix, continuationToken, delimiter, maxKeys, startAfter) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isString(prefix)) {
-      throw new TypeError('prefix should be of type "string"')
-    }
-    if (!isString(continuationToken)) {
-      throw new TypeError('continuationToken should be of type "string"')
-    }
-    if (!isString(delimiter)) {
-      throw new TypeError('delimiter should be of type "string"')
-    }
-    if (!isNumber(maxKeys)) {
-      throw new TypeError('maxKeys should be of type "number"')
-    }
-    if (!isString(startAfter)) {
-      throw new TypeError('startAfter should be of type "string"')
-    }
-    var queries = []
-
-    // Call for listing objects v2 API
-    queries.push(`list-type=2`)
-    queries.push(`encoding-type=url`)
-
-    // escape every value in query string, except maxKeys
-    queries.push(`prefix=${uriEscape(prefix)}`)
-    queries.push(`delimiter=${uriEscape(delimiter)}`)
-
-    if (continuationToken) {
-      continuationToken = uriEscape(continuationToken)
-      queries.push(`continuation-token=${continuationToken}`)
-    }
-    // Set start-after
-    if (startAfter) {
-      startAfter = uriEscape(startAfter)
-      queries.push(`start-after=${startAfter}`)
-    }
-    // no need to escape maxKeys
-    if (maxKeys) {
-      if (maxKeys >= 1000) {
-        maxKeys = 1000
-      }
-      queries.push(`max-keys=${maxKeys}`)
-    }
-    queries.sort()
-    var query = ''
-    if (queries.length > 0) {
-      query = `${queries.join('&')}`
-    }
-    var method = 'GET'
-    var transformer = transformers.getListObjectsV2Transformer()
-    this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => {
-      if (e) {
-        return transformer.emit('error', e)
-      }
-      pipesetup(response, transformer)
-    })
-    return transformer
-  }
-
-  // List the objects in the bucket using S3 ListObjects V2
-  //
-  // __Arguments__
-  // * `bucketName` _string_: name of the bucket
-  // * `prefix` _string_: the prefix of the objects that should be listed (optional, default `''`)
-  // * `recursive` _bool_: `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. (optional, default `false`)
-  // * `startAfter` _string_: Specifies the key to start after when listing objects in a bucket. (optional, default `''`)
-  //
-  // __Return Value__
-  // * `stream` _Stream_: stream emitting the objects in the bucket, the object is of the format:
-  // * `obj.name` _string_: name of the object
-  // * `obj.prefix` _string_: name of the object prefix
-  // * `obj.size` _number_: size of the object
-  // * `obj.etag` _string_: etag of the object
-  // * `obj.lastModified` _Date_: modified time stamp
-  listObjectsV2(bucketName, prefix, recursive, startAfter) {
-    if (prefix === undefined) {
-      prefix = ''
-    }
-    if (recursive === undefined) {
-      recursive = false
-    }
-    if (startAfter === undefined) {
-      startAfter = ''
-    }
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isValidPrefix(prefix)) {
-      throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`)
-    }
-    if (!isString(prefix)) {
-      throw new TypeError('prefix should be of type "string"')
-    }
-    if (!isBoolean(recursive)) {
-      throw new TypeError('recursive should be of type "boolean"')
-    }
-    if (!isString(startAfter)) {
-      throw new TypeError('startAfter should be of type "string"')
-    }
-    // if recursive is false set delimiter to '/'
-    var delimiter = recursive ? '' : '/'
-    var continuationToken = ''
-    var objects = []
-    var ended = false
-    var readStream = Stream.Readable({ objectMode: true })
-    readStream._read = () => {
-      // push one object per _read()
-      if (objects.length) {
-        readStream.push(objects.shift())
-        return
-      }
-      if (ended) {
-        return readStream.push(null)
-      }
-      // if there are no objects to push do query for the next batch of objects
-      this.listObjectsV2Query(bucketName, prefix, continuationToken, delimiter, 1000, startAfter)
-        .on('error', (e) => readStream.emit('error', e))
-        .on('data', (result) => {
-          if (result.isTruncated) {
-            continuationToken = result.nextContinuationToken
-          } else {
-            ended = true
-          }
-          objects = result.objects
-          readStream._read()
-        })
-    }
-    return readStream
-  }
-
-  // Remove all the objects residing in the objectsList.
-  //
-  // __Arguments__
-  // * `bucketName` _string_: name of the bucket
-  // * `objectsList` _array_: array of objects of one of the following:
-  //   * List of Object names as array of strings which are object keys: ['objectname1','objectname2']
-  //   * List of Object name and versionId as an object: [{name:"objectname",versionId:"my-version-id"}]
-
-  removeObjects(bucketName, objectsList, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!Array.isArray(objectsList)) {
-      throw new errors.InvalidArgumentError('objectsList should be a list')
-    }
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-
-    const maxEntries = 1000
-    const query = 'delete'
-    const method = 'POST'
-
-    let result = objectsList.reduce(
-      (result, entry) => {
-        result.list.push(entry)
-        if (result.list.length === maxEntries) {
-          result.listOfList.push(result.list)
-          result.list = []
-        }
-        return result
-      },
-      { listOfList: [], list: [] },
-    )
-
-    if (result.list.length > 0) {
-      result.listOfList.push(result.list)
-    }
-
-    const encoder = new TextEncoder()
-    const batchResults = []
-
-    async.eachSeries(
-      result.listOfList,
-      (list, batchCb) => {
-        var objects = []
-        list.forEach(function (value) {
-          if (isObject(value)) {
-            objects.push({ Key: value.name, VersionId: value.versionId })
-          } else {
-            objects.push({ Key: value })
-          }
-        })
-        let deleteObjects = { Delete: { Quiet: true, Object: objects } }
-        const builder = new xml2js.Builder({ headless: true })
-        let payload = builder.buildObject(deleteObjects)
-        payload = Buffer.from(encoder.encode(payload))
-        const headers = {}
-
-        headers['Content-MD5'] = toMd5(payload)
-
-        let removeObjectsResult
-        this.makeRequest({ method, bucketName, query, headers }, payload, [200], '', true, (e, response) => {
-          if (e) {
-            return batchCb(e)
-          }
-          pipesetup(response, transformers.removeObjectsTransformer())
-            .on('data', (data) => {
-              removeObjectsResult = data
-            })
-            .on('error', (e) => {
-              return batchCb(e, null)
-            })
-            .on('end', () => {
-              batchResults.push(removeObjectsResult)
-              return batchCb(null, removeObjectsResult)
-            })
-        })
-      },
-      (err) => {
-        // propagate a batch failure instead of silently reporting success
-        if (err) {
-          return cb(err)
-        }
-        cb(null, _.flatten(batchResults))
-      },
-    )
-  }
-
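A sketch of the batch delete above; removeObjects splits the list into batches of 1000 and issues one multi-object delete request per batch (illustrative names; assumes `client`):

    const toDelete = [
      'logs/2023-01-01.txt',
      { name: 'logs/2023-01-02.txt', versionId: 'my-version-id' },
    ]
    client.removeObjects('my-bucket', toDelete, (err, results) => {
      if (err) {
        return console.log(err)
      }
      console.log(results) // flattened per-batch delete results
    })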
-  // Get the policy on a bucket or an object prefix.
-  //
-  // __Arguments__
-  // * `bucketName` _string_: name of the bucket
-  // * `callback(err, policy)` _function_: callback function
-  getBucketPolicy(bucketName, cb) {
-    // Validate arguments.
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
-    }
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-
-    let method = 'GET'
-    let query = 'policy'
-    this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => {
-      if (e) {
-        return cb(e)
-      }
-
-      let policy = Buffer.from('')
-      pipesetup(response, transformers.getConcater())
-        .on('data', (data) => (policy = data))
-        .on('error', cb)
-        .on('end', () => {
-          cb(null, policy.toString())
-        })
-    })
-  }
-
-  // Set the policy on a bucket or an object prefix.
-  //
-  // __Arguments__
-  // * `bucketName` _string_: name of the bucket
-  // * `bucketPolicy` _string_: bucket policy (JSON stringify'ed)
-  // * `callback(err)` _function_: callback function
-  setBucketPolicy(bucketName, policy, cb) {
-    // Validate arguments.
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
-    }
-    if (!isString(policy)) {
-      throw new errors.InvalidBucketPolicyError(`Invalid bucket policy: ${policy} - must be "string"`)
-    }
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-
-    let method = 'DELETE'
-    let query = 'policy'
-
-    if (policy) {
-      method = 'PUT'
-    }
-
-    this.makeRequest({ method, bucketName, query }, policy, [204], '', false, cb)
-  }
-
-  // Generate a generic presigned URL which can be
-  // used for HTTP methods GET, PUT, HEAD and DELETE
-  //
-  // __Arguments__
-  // * `method` _string_: name of the HTTP method
-  // * `bucketName` _string_: name of the bucket
-  // * `objectName` _string_: name of the object
-  // * `expiry` _number_: expiry in seconds (optional, default 7 days)
-  // * `reqParams` _object_: request parameters (optional) e.g. {versionId: "10fa9946-3f64-4137-a58f-888065c0732e"}
-  // * `requestDate` _Date_: A date object, the url will be issued at (optional)
-  presignedUrl(method, bucketName, objectName, expires, reqParams, requestDate, cb) {
-    if (this.anonymous) {
-      throw new errors.AnonymousRequestError('Presigned ' + method + ' url cannot be generated for anonymous requests')
-    }
-    if (isFunction(requestDate)) {
-      cb = requestDate
-      requestDate = new Date()
-    }
-    if (isFunction(reqParams)) {
-      cb = reqParams
-      reqParams = {}
-      requestDate = new Date()
-    }
-    if (isFunction(expires)) {
-      cb = expires
-      reqParams = {}
-      expires = 24 * 60 * 60 * 7 // 7 days in seconds
-      requestDate = new Date()
-    }
-    if (!isNumber(expires)) {
-      throw new TypeError('expires should be of type "number"')
-    }
-    if (!isObject(reqParams)) {
-      throw new TypeError('reqParams should be of type "object"')
-    }
-    if (!isValidDate(requestDate)) {
-      throw new TypeError('requestDate should be of type "Date" and valid')
-    }
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-    var query = querystring.stringify(reqParams)
-    this.getBucketRegion(bucketName, (e, region) => {
-      if (e) {
-        return cb(e)
-      }
-      // This statement is added to ensure that we send error through
-      // callback on presign failure.
-      var url
-      var reqOptions = this.getRequestOptions({ method, region, bucketName, objectName, query })
-
-      this.checkAndRefreshCreds()
-      try {
-        url = presignSignatureV4(
-          reqOptions,
-          this.accessKey,
-          this.secretKey,
-          this.sessionToken,
-          region,
-          requestDate,
-          expires,
-        )
-      } catch (pe) {
-        return cb(pe)
-      }
-      cb(null, url)
-    })
-  }
-
-  // Generate a presigned URL for GET
-  //
-  // __Arguments__
-  // * `bucketName` _string_: name of the bucket
-  // * `objectName` _string_: name of the object
-  // * `expiry` _number_: expiry in seconds (optional, default 7 days)
-  // * `respHeaders` _object_: response headers to override or request params for query (optional) e.g. {versionId: "10fa9946-3f64-4137-a58f-888065c0732e"}
-  // * `requestDate` _Date_: A date object, the url will be issued at (optional)
-  presignedGetObject(bucketName, objectName, expires, respHeaders, requestDate, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isValidObjectName(objectName)) {
-      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
-    }
-
-    if (isFunction(respHeaders)) {
-      cb = respHeaders
-      respHeaders = {}
-      requestDate = new Date()
-    }
-
-    var validRespHeaders = [
-      'response-content-type',
-      'response-content-language',
-      'response-expires',
-      'response-cache-control',
-      'response-content-disposition',
-      'response-content-encoding',
-    ]
-    validRespHeaders.forEach((header) => {
-      if (respHeaders !== undefined && respHeaders[header] !== undefined && !isString(respHeaders[header])) {
-        throw new TypeError(`response header ${header} should be of type "string"`)
-      }
-    })
-    return this.presignedUrl('GET', bucketName, objectName, expires, respHeaders, requestDate, cb)
-  }
-
-  // Generate a presigned URL for PUT. Using this URL, the browser can upload to S3 only with the specified object name.
-  //
-  // __Arguments__
-  // * `bucketName` _string_: name of the bucket
-  // * `objectName` _string_: name of the object
-  // * `expiry` _number_: expiry in seconds (optional, default 7 days)
-  presignedPutObject(bucketName, objectName, expires, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
-    }
-    if (!isValidObjectName(objectName)) {
-      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
-    }
-    return this.presignedUrl('PUT', bucketName, objectName, expires, cb)
-  }
-
-  // return PostPolicy object
-  newPostPolicy() {
-    return new PostPolicy()
-  }
-
-  // presignedPostPolicy can be used in situations where we want more control on the upload than what
-  // presignedPutObject() provides. i.e. using presignedPostPolicy we can apply policy restrictions
-  // on the object's `name`, `bucket`, `expiry`, `Content-Type`, `Content-Disposition` and `metaData`
-  presignedPostPolicy(postPolicy, cb) {
-    if (this.anonymous) {
-      throw new errors.AnonymousRequestError('Presigned POST policy cannot be generated for anonymous requests')
-    }
-    if (!isObject(postPolicy)) {
-      throw new TypeError('postPolicy should be of type "object"')
-    }
-    if (!isFunction(cb)) {
-      throw new TypeError('cb should be of type "function"')
-    }
-    this.getBucketRegion(postPolicy.formData.bucket, (e, region) => {
-      if (e) {
-        return cb(e)
-      }
-      var date = new Date()
-      var dateStr = makeDateLong(date)
-
-      this.checkAndRefreshCreds()
-
-      if (!postPolicy.policy.expiration) {
-        // 'expiration' is a mandatory field for S3.
-        // Set default expiration date of 7 days.
-        var expires = new Date()
-        expires.setSeconds(24 * 60 * 60 * 7)
-        postPolicy.setExpires(expires)
-      }
-
-      postPolicy.policy.conditions.push(['eq', '$x-amz-date', dateStr])
-      postPolicy.formData['x-amz-date'] = dateStr
-
-      postPolicy.policy.conditions.push(['eq', '$x-amz-algorithm', 'AWS4-HMAC-SHA256'])
-      postPolicy.formData['x-amz-algorithm'] = 'AWS4-HMAC-SHA256'
-
-      postPolicy.policy.conditions.push(['eq', '$x-amz-credential', this.accessKey + '/' + getScope(region, date)])
-      postPolicy.formData['x-amz-credential'] = this.accessKey + '/' + getScope(region, date)
-
-      if (this.sessionToken) {
-        postPolicy.policy.conditions.push(['eq', '$x-amz-security-token', this.sessionToken])
-        postPolicy.formData['x-amz-security-token'] = this.sessionToken
-      }
-
-      var policyBase64 = Buffer.from(JSON.stringify(postPolicy.policy)).toString('base64')
-
-      postPolicy.formData.policy = policyBase64
-
-      var signature = postPresignSignatureV4(region, date, this.secretKey, policyBase64)
-
-      postPolicy.formData['x-amz-signature'] = signature
-      var opts = {}
-      opts.region = region
-      opts.bucketName = postPolicy.formData.bucket
-      var reqOptions = this.getRequestOptions(opts)
-      var portStr = this.port === 80 || this.port === 443 ? '' : `:${this.port.toString()}`
-      var urlStr = `${reqOptions.protocol}//${reqOptions.host}${portStr}${reqOptions.path}`
-      cb(null, { postURL: urlStr, formData: postPolicy.formData })
-    })
-  }
-
-  // Complete the multipart upload. After all the parts are uploaded issuing
-  // this call will aggregate the parts on the server into a single object.
-  completeMultipartUpload(bucketName, objectName, uploadId, etags, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isValidObjectName(objectName)) {
-      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
-    }
-    if (!isString(uploadId)) {
-      throw new TypeError('uploadId should be of type "string"')
-    }
-    if (!Array.isArray(etags)) {
-      throw new TypeError('etags should be of type "Array"')
-    }
-    if (!isFunction(cb)) {
-      throw new TypeError('cb should be of type "function"')
-    }
-
-    if (!uploadId) {
-      throw new errors.InvalidArgumentError('uploadId cannot be empty')
-    }
-
-    var method = 'POST'
-    var query = `uploadId=${uriEscape(uploadId)}`
-
-    var parts = []
-
-    etags.forEach((element) => {
-      parts.push({
-        Part: [
-          {
-            PartNumber: element.part,
-          },
-          {
-            ETag: element.etag,
-          },
-        ],
-      })
-    })
-
-    var payloadObject = { CompleteMultipartUpload: parts }
-    var payload = Xml(payloadObject)
-
-    this.makeRequest({ method, bucketName, objectName, query }, payload, [200], '', true, (e, response) => {
-      if (e) {
-        return cb(e)
-      }
-      var transformer = transformers.getCompleteMultipartTransformer()
-      pipesetup(response, transformer)
-        .on('error', (e) => cb(e))
-        .on('data', (result) => {
-          if (result.errCode) {
-            // Multipart Complete API returns an error XML after a 200 http status
-            cb(new errors.S3Error(result.errMessage))
-          } else {
-            const completeMultipartResult = {
-              etag: result.etag,
-              versionId: getVersionId(response.headers),
-            }
-            cb(null, completeMultipartResult)
-          }
-        })
-    })
-  }
-
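Looking back at the presigned POST flow above, a minimal browser-upload sketch (illustrative values, not part of the patch; assumes `client`):

    const policy = client.newPostPolicy()
    policy.setBucket('my-bucket')
    policy.setKey('uploads/photo.jpg')
    const expires = new Date()
    expires.setSeconds(24 * 60 * 60) // valid for one day
    policy.setExpires(expires)
    policy.setContentType('image/jpeg')
    client.presignedPostPolicy(policy, (err, data) => {
      if (err) {
        return console.log(err)
      }
      // POST a multipart/form-data request to data.postURL, sending each
      // data.formData entry as a form field and the file as the last field.
      console.log(data.postURL, data.formData)
    })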
-  // Called by listIncompleteUploads to fetch a batch of incomplete uploads.
-  listIncompleteUploadsQuery(bucketName, prefix, keyMarker, uploadIdMarker, delimiter) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isString(prefix)) {
-      throw new TypeError('prefix should be of type "string"')
-    }
-    if (!isString(keyMarker)) {
-      throw new TypeError('keyMarker should be of type "string"')
-    }
-    if (!isString(uploadIdMarker)) {
-      throw new TypeError('uploadIdMarker should be of type "string"')
-    }
-    if (!isString(delimiter)) {
-      throw new TypeError('delimiter should be of type "string"')
-    }
-    var queries = []
-    queries.push(`prefix=${uriEscape(prefix)}`)
-    queries.push(`delimiter=${uriEscape(delimiter)}`)
-
-    if (keyMarker) {
-      keyMarker = uriEscape(keyMarker)
-      queries.push(`key-marker=${keyMarker}`)
-    }
-    if (uploadIdMarker) {
-      queries.push(`upload-id-marker=${uploadIdMarker}`)
-    }
-
-    var maxUploads = 1000
-    queries.push(`max-uploads=${maxUploads}`)
-    queries.sort()
-    queries.unshift('uploads')
-    var query = ''
-    if (queries.length > 0) {
-      query = `${queries.join('&')}`
-    }
-    var method = 'GET'
-    var transformer = transformers.getListMultipartTransformer()
-    this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => {
-      if (e) {
-        return transformer.emit('error', e)
-      }
-      pipesetup(response, transformer)
-    })
-    return transformer
-  }
-
-  // Find uploadId of an incomplete upload.
-  findUploadId(bucketName, objectName, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isValidObjectName(objectName)) {
-      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
-    }
-    if (!isFunction(cb)) {
-      throw new TypeError('cb should be of type "function"')
-    }
-    var latestUpload
-    var listNext = (keyMarker, uploadIdMarker) => {
-      this.listIncompleteUploadsQuery(bucketName, objectName, keyMarker, uploadIdMarker, '')
-        .on('error', (e) => cb(e))
-        .on('data', (result) => {
-          result.uploads.forEach((upload) => {
-            if (upload.key === objectName) {
-              if (!latestUpload || upload.initiated.getTime() > latestUpload.initiated.getTime()) {
-                latestUpload = upload
-                return
-              }
-            }
-          })
-          if (result.isTruncated) {
-            listNext(result.nextKeyMarker, result.nextUploadIdMarker)
-            return
-          }
-          if (latestUpload) {
-            return cb(null, latestUpload.uploadId)
-          }
-          cb(null, undefined)
-        })
-    }
-    listNext('', '')
-  }
-
-  // Set the notification configuration on the bucket, replacing any existing configuration.
-  setBucketNotification(bucketName, config, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isObject(config)) {
-      throw new TypeError('notification config should be of type "Object"')
-    }
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-    var method = 'PUT'
-    var query = 'notification'
-    var builder = new xml2js.Builder({
-      rootName: 'NotificationConfiguration',
-      renderOpts: { pretty: false },
-      headless: true,
-    })
-    var payload = builder.buildObject(config)
-    this.makeRequest({ method, bucketName, query }, payload, [200], '', false, cb)
-  }
-
-  // Remove all the notification configurations in the S3 provider.
-  removeAllBucketNotification(bucketName, cb) {
-    this.setBucketNotification(bucketName, new NotificationConfig(), cb)
-  }
-
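A configuration sketch for the notification APIs above (ARN parts and names are illustrative, not part of the patch; assumes `client` and the notification helpers exported by this library):

    const arn = buildARN('aws', 'sqs', 'us-east-1', '123456789012', 'my-queue')
    const queue = new QueueConfig(arn)
    queue.addFilterPrefix('photos/')
    queue.addFilterSuffix('.jpg')
    queue.addEvent('s3:ObjectCreated:*')

    const config = new NotificationConfig()
    config.add(queue)
    client.setBucketNotification('my-bucket', config, (err) => {
      if (err) {
        return console.log(err)
      }
      console.log('notification configuration set')
    })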
-  // Return the list of notification configurations stored
-  // in the S3 provider
-  getBucketNotification(bucketName, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-    var method = 'GET'
-    var query = 'notification'
-    this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => {
-      if (e) {
-        return cb(e)
-      }
-      var transformer = transformers.getBucketNotificationTransformer()
-      var bucketNotification
-      pipesetup(response, transformer)
-        .on('data', (result) => (bucketNotification = result))
-        .on('error', (e) => cb(e))
-        .on('end', () => cb(null, bucketNotification))
-    })
-  }
-
-  // Listens for bucket notifications. Returns an EventEmitter.
-  listenBucketNotification(bucketName, prefix, suffix, events) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
-    }
-    if (!isString(prefix)) {
-      throw new TypeError('prefix must be of type string')
-    }
-    if (!isString(suffix)) {
-      throw new TypeError('suffix must be of type string')
-    }
-    if (!Array.isArray(events)) {
-      throw new TypeError('events must be of type Array')
-    }
-    let listener = new NotificationPoller(this, bucketName, prefix, suffix, events)
-    listener.start()
-
-    return listener
-  }
-
-  getBucketVersioning(bucketName, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isFunction(cb)) {
-      throw new errors.InvalidArgumentError('callback should be of type "function"')
-    }
-    var method = 'GET'
-    var query = 'versioning'
-
-    this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => {
-      if (e) {
-        return cb(e)
-      }
-
-      let versionConfig = Buffer.from('')
-      pipesetup(response, transformers.bucketVersioningTransformer())
-        .on('data', (data) => {
-          versionConfig = data
-        })
-        .on('error', cb)
-        .on('end', () => {
-          cb(null, versionConfig)
-        })
-    })
-  }
-
-  setBucketVersioning(bucketName, versionConfig, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!Object.keys(versionConfig).length) {
-      throw new errors.InvalidArgumentError('versionConfig cannot be empty')
-    }
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-
-    var method = 'PUT'
-    var query = 'versioning'
-    var builder = new xml2js.Builder({
-      rootName: 'VersioningConfiguration',
-      renderOpts: { pretty: false },
-      headless: true,
-    })
-    var payload = builder.buildObject(versionConfig)
-
-    this.makeRequest({ method, bucketName, query }, payload, [200], '', false, cb)
-  }
-
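A versioning sketch for the two methods above (illustrative, not part of the patch; assumes `client`):

    client.setBucketVersioning('my-bucket', { Status: 'Enabled' }, (err) => {
      if (err) {
        return console.log(err)
      }
      client.getBucketVersioning('my-bucket', (err, config) => {
        if (err) {
          return console.log(err)
        }
        console.log(config) // e.g. { Status: 'Enabled' }
      })
    })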
-  /** To set Tags on a bucket or object based on the params
-   * __Arguments__
-   * taggingParams _object_ which contains the following properties
-   *  bucketName _string_,
-   *  objectName _string_ (Optional),
-   *  tags _object_ of the form {'<tag-key-1>':'<tag-value-1>','<tag-key-2>':'<tag-value-2>'}
-   *  putOpts _object_ (Optional) e.g. {versionId:"my-object-version-id"},
-   *  `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
-   */
-  setTagging(taggingParams) {
-    const { bucketName, objectName, tags, putOpts = {}, cb } = taggingParams
-    const method = 'PUT'
-    let query = 'tagging'
-
-    if (putOpts && putOpts.versionId) {
-      query = `${query}&versionId=${putOpts.versionId}`
-    }
-    const tagsList = []
-    for (const [key, value] of Object.entries(tags)) {
-      tagsList.push({ Key: key, Value: value })
-    }
-    const taggingConfig = {
-      Tagging: {
-        TagSet: {
-          Tag: tagsList,
-        },
-      },
-    }
-    const encoder = new TextEncoder()
-    const headers = {}
-    const builder = new xml2js.Builder({ headless: true, renderOpts: { pretty: false } })
-    let payload = builder.buildObject(taggingConfig)
-    payload = Buffer.from(encoder.encode(payload))
-    headers['Content-MD5'] = toMd5(payload)
-    const requestOptions = { method, bucketName, query, headers }
-
-    if (objectName) {
-      requestOptions['objectName'] = objectName
-    }
-
-    this.makeRequest(requestOptions, payload, [200], '', false, cb)
-  }
-
-  /** Set Tags on a Bucket
-   * __Arguments__
-   * bucketName _string_
-   * tags _object_ of the form {'<tag-key-1>':'<tag-value-1>','<tag-key-2>':'<tag-value-2>'}
-   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
-   */
-  setBucketTagging(bucketName, tags, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isObject(tags)) {
-      throw new errors.InvalidArgumentError('tags should be of type "object"')
-    }
-    if (Object.keys(tags).length > 10) {
-      throw new errors.InvalidArgumentError('maximum tags allowed is 10')
-    }
-    if (!isFunction(cb)) {
-      throw new errors.InvalidArgumentError('callback should be of type "function"')
-    }
-
-    return this.setTagging({ bucketName, tags, cb })
-  }
-
-  /** Set Tags on an Object
-   * __Arguments__
-   * bucketName _string_
-   * objectName _string_
-   * tags _object_ of the form {'<tag-key-1>':'<tag-value-1>','<tag-key-2>':'<tag-value-2>'}
-   * putOpts _object_ (Optional) e.g. {versionId:"my-object-version-id"},
-   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
-   */
-  setObjectTagging(bucketName, objectName, tags, putOpts = {}, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isValidObjectName(objectName)) {
-      throw new errors.InvalidObjectNameError('Invalid object name: ' + objectName)
-    }
-
-    if (isFunction(putOpts)) {
-      cb = putOpts
-      putOpts = {}
-    }
-
-    if (!isObject(tags)) {
-      throw new errors.InvalidArgumentError('tags should be of type "object"')
-    }
-    if (Object.keys(tags).length > 10) {
-      throw new errors.InvalidArgumentError('maximum tags allowed is 10')
-    }
-
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-    return this.setTagging({ bucketName, objectName, tags, putOpts, cb })
-  }
-
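Usage sketches for the tagging setters above (illustrative, not part of the patch; assumes `client`):

    client.setBucketTagging('my-bucket', { team: 'storage', env: 'dev' }, (err) => {
      if (err) {
        return console.log(err)
      }
    })

    client.setObjectTagging('my-bucket', 'photos/1.jpg', { retain: 'true' }, { versionId: 'my-version-id' }, (err) => {
      if (err) {
        return console.log(err)
      }
    })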
-  /** Remove Tags on a Bucket/Object based on params
-   * __Arguments__
-   * bucketName _string_
-   * objectName _string_ (optional)
-   * removeOpts _object_ (Optional) e.g. {versionId:"my-object-version-id"},
-   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
-   */
-  removeTagging({ bucketName, objectName, removeOpts, cb }) {
-    const method = 'DELETE'
-    let query = 'tagging'
-
-    if (removeOpts && Object.keys(removeOpts).length && removeOpts.versionId) {
-      query = `${query}&versionId=${removeOpts.versionId}`
-    }
-    // objectName (when provided) is already part of requestOptions
-    const requestOptions = { method, bucketName, objectName, query }
-
-    this.makeRequest(requestOptions, '', [200, 204], '', true, cb)
-  }
-
-  /** Remove Tags associated with a bucket
-   * __Arguments__
-   * bucketName _string_
-   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
-   */
-  removeBucketTagging(bucketName, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-    return this.removeTagging({ bucketName, cb })
-  }
-
-  /** Remove tags associated with an object
-   * __Arguments__
-   * bucketName _string_
-   * objectName _string_
-   * removeOpts _object_ (Optional) e.g. {versionId:"my-object-version-id"}
-   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
-   */
-  removeObjectTagging(bucketName, objectName, removeOpts, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isValidObjectName(objectName)) {
-      throw new errors.InvalidObjectNameError('Invalid object name: ' + objectName)
-    }
-    if (isFunction(removeOpts)) {
-      cb = removeOpts
-      removeOpts = {}
-    }
-    if (removeOpts && Object.keys(removeOpts).length && !isObject(removeOpts)) {
-      throw new errors.InvalidArgumentError('removeOpts should be of type "object"')
-    }
-
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-
-    return this.removeTagging({ bucketName, objectName, removeOpts, cb })
-  }
-
-  /**
-   * Apply lifecycle configuration on a bucket.
-   * bucketName _string_
-   * policyConfig _object_ a valid policy configuration object.
-   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
-   */
-  applyBucketLifecycle(bucketName, policyConfig, cb) {
-    const method = 'PUT'
-    const query = 'lifecycle'
-
-    const encoder = new TextEncoder()
-    const headers = {}
-    const builder = new xml2js.Builder({
-      rootName: 'LifecycleConfiguration',
-      headless: true,
-      renderOpts: { pretty: false },
-    })
-    let payload = builder.buildObject(policyConfig)
-    payload = Buffer.from(encoder.encode(payload))
-    const requestOptions = { method, bucketName, query, headers }
-    headers['Content-MD5'] = toMd5(payload)
-
-    this.makeRequest(requestOptions, payload, [200], '', false, cb)
-  }
-
-  /** Remove lifecycle configuration of a bucket.
-   * bucketName _string_
-   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
-   */
-  removeBucketLifecycle(bucketName, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    const method = 'DELETE'
-    const query = 'lifecycle'
-    this.makeRequest({ method, bucketName, query }, '', [204], '', false, cb)
-  }
-
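To make the lifecycle payload concrete, a sketch of a configuration accepted by applyBucketLifecycle above (and by the setBucketLifecycle wrapper that follows); key names follow the S3 LifecycleConfiguration schema, the rule values are illustrative:

    const lifecycleConfig = {
      Rule: [
        {
          ID: 'expire-logs', // hypothetical rule name
          Status: 'Enabled',
          Filter: { Prefix: 'logs/' },
          Expiration: { Days: 30 },
        },
      ],
    }
    client.setBucketLifecycle('my-bucket', lifecycleConfig, (err) => {
      if (err) {
        return console.log(err)
      }
    })
    // Passing null (or '') removes the configuration instead.
    client.setBucketLifecycle('my-bucket', null, (err) => {
      if (err) {
        return console.log(err)
      }
    })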
-  /** Set/Override lifecycle configuration on a bucket. If the configuration is empty, it removes the configuration.
-   * bucketName _string_
-   * lifeCycleConfig _object_ one of the following values: (null or '') to remove the lifecycle configuration, or a valid lifecycle configuration
-   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
-   */
-  setBucketLifecycle(bucketName, lifeCycleConfig = null, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (_.isEmpty(lifeCycleConfig)) {
-      this.removeBucketLifecycle(bucketName, cb)
-    } else {
-      this.applyBucketLifecycle(bucketName, lifeCycleConfig, cb)
-    }
-  }
-
-  /** Get lifecycle configuration on a bucket.
-   * bucketName _string_
-   * `cb(err, config)` _function_ - callback; on success `config` is the bucket's lifecycle configuration.
-   */
-  getBucketLifecycle(bucketName, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    const method = 'GET'
-    const query = 'lifecycle'
-    const requestOptions = { method, bucketName, query }
-
-    this.makeRequest(requestOptions, '', [200], '', true, (e, response) => {
-      const transformer = transformers.lifecycleTransformer()
-      if (e) {
-        return cb(e)
-      }
-      let lifecycleConfig
-      pipesetup(response, transformer)
-        .on('data', (result) => (lifecycleConfig = result))
-        .on('error', (e) => cb(e))
-        .on('end', () => cb(null, lifecycleConfig))
-    })
-  }
-
-  putObjectRetention(bucketName, objectName, retentionOpts = {}, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isValidObjectName(objectName)) {
-      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
-    }
-    if (!isObject(retentionOpts)) {
-      throw new errors.InvalidArgumentError('retentionOpts should be of type "object"')
-    } else {
-      if (retentionOpts.governanceBypass && !isBoolean(retentionOpts.governanceBypass)) {
-        throw new errors.InvalidArgumentError('Invalid value for governanceBypass', retentionOpts.governanceBypass)
-      }
-      if (
-        retentionOpts.mode &&
-        ![RETENTION_MODES.COMPLIANCE, RETENTION_MODES.GOVERNANCE].includes(retentionOpts.mode)
-      ) {
-        throw new errors.InvalidArgumentError('Invalid object retention mode ', retentionOpts.mode)
-      }
-      if (retentionOpts.retainUntilDate && !isString(retentionOpts.retainUntilDate)) {
-        throw new errors.InvalidArgumentError('Invalid value for retainUntilDate', retentionOpts.retainUntilDate)
-      }
-      if (retentionOpts.versionId && !isString(retentionOpts.versionId)) {
-        throw new errors.InvalidArgumentError('Invalid value for versionId', retentionOpts.versionId)
-      }
-    }
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-
-    const method = 'PUT'
-    let query = 'retention'
-
-    const headers = {}
-    if (retentionOpts.governanceBypass) {
-      headers['X-Amz-Bypass-Governance-Retention'] = true
-    }
-
-    const builder = new xml2js.Builder({ rootName: 'Retention', renderOpts: { pretty: false }, headless: true })
-    const params = {}
-
-    if (retentionOpts.mode) {
-      params.Mode = retentionOpts.mode
-    }
-    if (retentionOpts.retainUntilDate) {
-      params.RetainUntilDate = retentionOpts.retainUntilDate
-    }
-    if (retentionOpts.versionId) {
-      query += `&versionId=${retentionOpts.versionId}`
-    }
-
-    let payload = builder.buildObject(params)
-
-    headers['Content-MD5'] = toMd5(payload)
-    this.makeRequest({ method, bucketName, objectName, query, headers }, payload, [200, 204], '', false, cb)
-  }
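A retention sketch for putObjectRetention above (illustrative values, not part of the patch; assumes `client` and an object-lock enabled bucket):

    client.putObjectRetention(
      'my-bucket',
      'photos/1.jpg',
      {
        governanceBypass: true,
        mode: 'GOVERNANCE', // i.e. RETENTION_MODES.GOVERNANCE
        retainUntilDate: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000).toISOString(),
      },
      (err) => {
        if (err) {
          return console.log(err)
        }
      },
    )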
-
-  getObjectRetention(bucketName, objectName, getOpts, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isValidObjectName(objectName)) {
-      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
-    }
-    if (!isObject(getOpts)) {
-      throw new errors.InvalidArgumentError('getOpts should be of type "object"')
-    } else if (getOpts.versionId && !isString(getOpts.versionId)) {
-      throw new errors.InvalidArgumentError('versionId should be of type "string"')
-    }
-    if (cb && !isFunction(cb)) {
-      throw new errors.InvalidArgumentError('callback should be of type "function"')
-    }
-    const method = 'GET'
-    let query = 'retention'
-    if (getOpts.versionId) {
-      query += `&versionId=${getOpts.versionId}`
-    }
-
-    this.makeRequest({ method, bucketName, objectName, query }, '', [200], '', true, (e, response) => {
-      if (e) {
-        return cb(e)
-      }
-
-      let retentionConfig = Buffer.from('')
-      pipesetup(response, transformers.objectRetentionTransformer())
-        .on('data', (data) => {
-          retentionConfig = data
-        })
-        .on('error', cb)
-        .on('end', () => {
-          cb(null, retentionConfig)
-        })
-    })
-  }
-
-  setBucketEncryption(bucketName, encryptionConfig, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-
-    if (isFunction(encryptionConfig)) {
-      cb = encryptionConfig
-      encryptionConfig = null
-    }
-
-    if (!_.isEmpty(encryptionConfig) && encryptionConfig.Rule.length > 1) {
-      throw new errors.InvalidArgumentError('Invalid Rule length. Only one rule is allowed: ' + encryptionConfig.Rule)
-    }
-    if (cb && !isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-
-    let encryptionObj = encryptionConfig
-    if (_.isEmpty(encryptionConfig)) {
-      encryptionObj = {
-        // Default MinIO Server Supported Rule
-        Rule: [
-          {
-            ApplyServerSideEncryptionByDefault: {
-              SSEAlgorithm: 'AES256',
-            },
-          },
-        ],
-      }
-    }
-
-    let method = 'PUT'
-    let query = 'encryption'
-    let builder = new xml2js.Builder({
-      rootName: 'ServerSideEncryptionConfiguration',
-      renderOpts: { pretty: false },
-      headless: true,
-    })
-    let payload = builder.buildObject(encryptionObj)
-
-    const headers = {}
-    headers['Content-MD5'] = toMd5(payload)
-
-    this.makeRequest({ method, bucketName, query, headers }, payload, [200], '', false, cb)
-  }
-
-  getBucketEncryption(bucketName, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isFunction(cb)) {
-      throw new errors.InvalidArgumentError('callback should be of type "function"')
-    }
-    const method = 'GET'
-    const query = 'encryption'
-
-    this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => {
-      if (e) {
-        return cb(e)
-      }
-
-      let bucketEncConfig = Buffer.from('')
-      pipesetup(response, transformers.bucketEncryptionTransformer())
-        .on('data', (data) => {
-          bucketEncConfig = data
-        })
-        .on('error', cb)
-        .on('end', () => {
-          cb(null, bucketEncConfig)
-        })
-    })
-  }
-
-  removeBucketEncryption(bucketName, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isFunction(cb)) {
-      throw new errors.InvalidArgumentError('callback should be of type "function"')
-    }
-    const method = 'DELETE'
-    const query = 'encryption'
-
-    this.makeRequest({ method, bucketName, query }, '', [204], '', false, cb)
-  }
-
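A sketch of the encryption APIs above; passing only the callback applies the default AES256 rule (illustrative, not part of the patch; assumes `client`):

    // Apply the default SSE rule (AES256), then read it back.
    client.setBucketEncryption('my-bucket', (err) => {
      if (err) {
        return console.log(err)
      }
      client.getBucketEncryption('my-bucket', (err, config) => {
        if (err) {
          return console.log(err)
        }
        console.log(config)
      })
    })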
-  /**
-   * Internal method to upload a part during compose object.
-   * @param partConfig __object__ contains the following.
-   *  bucketName __string__
-   *  objectName __string__
-   *  uploadID __string__
-   *  partNumber __number__
-   *  headers __object__
-   * @param cb __function__ called with `(err, result)`; `err` is non-null on failure.
-   */
-  uploadPartCopy(partConfig, cb) {
-    const { bucketName, objectName, uploadID, partNumber, headers } = partConfig
-
-    const method = 'PUT'
-    let query = `uploadId=${uploadID}&partNumber=${partNumber}`
-    const requestOptions = { method, bucketName, objectName: objectName, query, headers }
-    return this.makeRequest(requestOptions, '', [200], '', true, (e, response) => {
-      let partCopyResult = Buffer.from('')
-      if (e) {
-        return cb(e)
-      }
-      pipesetup(response, transformers.uploadPartTransformer())
-        .on('data', (data) => {
-          partCopyResult = data
-        })
-        .on('error', cb)
-        .on('end', () => {
-          let uploadPartCopyRes = {
-            etag: sanitizeETag(partCopyResult.ETag),
-            key: objectName,
-            part: partNumber,
-          }
-
-          cb(null, uploadPartCopyRes)
-        })
-    })
-  }
-
-  composeObject(destObjConfig = {}, sourceObjList = [], cb) {
-    const me = this // many async flows. so store the ref.
-    const sourceFilesLength = sourceObjList.length
-
-    if (!Array.isArray(sourceObjList)) {
-      throw new errors.InvalidArgumentError('sourceObjList should be an array of "CopySourceOptions"')
-    }
-    if (!(destObjConfig instanceof CopyDestinationOptions)) {
-      throw new errors.InvalidArgumentError('destObjConfig should be of type "CopyDestinationOptions"')
-    }
-
-    if (sourceFilesLength < 1 || sourceFilesLength > PART_CONSTRAINTS.MAX_PARTS_COUNT) {
-      throw new errors.InvalidArgumentError(
-        `There must be at least one and up to ${PART_CONSTRAINTS.MAX_PARTS_COUNT} source objects.`,
-      )
-    }
-
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-
-    for (let i = 0; i < sourceFilesLength; i++) {
-      if (!sourceObjList[i].validate()) {
-        return false
-      }
-    }
-
-    if (!destObjConfig.validate()) {
-      return false
-    }
-
-    const getStatOptions = (srcConfig) => {
-      let statOpts = {}
-      if (!_.isEmpty(srcConfig.VersionID)) {
-        statOpts = {
-          versionId: srcConfig.VersionID,
-        }
-      }
-      return statOpts
-    }
-    const srcObjectSizes = []
-    let totalSize = 0
-    let totalParts = 0
-
-    const sourceObjStats = sourceObjList.map((srcItem) =>
-      me.statObject(srcItem.Bucket, srcItem.Object, getStatOptions(srcItem)),
-    )
-
-    return Promise.all(sourceObjStats)
-      .then((srcObjectInfos) => {
-        const validatedStats = srcObjectInfos.map((resItemStat, index) => {
-          const srcConfig = sourceObjList[index]
-
-          let srcCopySize = resItemStat.size
-          // Check if a segment is specified, and if so, is the
-          // segment within object bounds?
-          if (srcConfig.MatchRange) {
-            // Since range is specified,
-            //    0 <= src.srcStart <= src.srcEnd
-            // so only invalid case to check is:
-            const srcStart = srcConfig.Start
-            const srcEnd = srcConfig.End
-            if (srcEnd >= srcCopySize || srcStart < 0) {
-              throw new errors.InvalidArgumentError(
-                `CopySrcOptions ${index} has invalid segment-to-copy [${srcStart}, ${srcEnd}] (size is ${srcCopySize})`,
-              )
-            }
-            srcCopySize = srcEnd - srcStart + 1
-          }
-
-          // Only the last source may be less than `absMinPartSize`
-          if (srcCopySize < PART_CONSTRAINTS.ABS_MIN_PART_SIZE && index < sourceFilesLength - 1) {
-            throw new errors.InvalidArgumentError(
-              `CopySrcOptions ${index} is too small (${srcCopySize}) and it is not the last part.`,
-            )
-          }
-
-          // Is data to copy too large?
-          totalSize += srcCopySize
-          if (totalSize > PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE) {
-            throw new errors.InvalidArgumentError(`Cannot compose an object of size ${totalSize} (> 5TiB)`)
-          }
-
-          // record source size
-          srcObjectSizes[index] = srcCopySize
-
-          // calculate parts needed for current source
-          totalParts += partsRequired(srcCopySize)
-          // Do we need more parts than we are allowed?
-          if (totalParts > PART_CONSTRAINTS.MAX_PARTS_COUNT) {
-            throw new errors.InvalidArgumentError(
-              `Your proposed compose object requires more than ${PART_CONSTRAINTS.MAX_PARTS_COUNT} parts`,
-            )
-          }
-
-          return resItemStat
-        })
-
-        if ((totalParts === 1 && totalSize <= PART_CONSTRAINTS.MAX_PART_SIZE) || totalSize === 0) {
-          return this.copyObject(sourceObjList[0], destObjConfig, cb) // use copyObjectV2
-        }
-
-        // preserve etag to avoid modification of object while copying.
-        for (let i = 0; i < sourceFilesLength; i++) {
-          sourceObjList[i].MatchETag = validatedStats[i].etag
-        }
-
-        const splitPartSizeList = validatedStats.map((resItemStat, idx) => {
-          const calSize = calculateEvenSplits(srcObjectSizes[idx], sourceObjList[idx])
-          return calSize
-        })
-
-        function getUploadPartConfigList(uploadId) {
-          const uploadPartConfigList = []
-
-          splitPartSizeList.forEach((splitSize, splitIndex) => {
-            const { startIndex: startIdx, endIndex: endIdx, objInfo: objConfig } = splitSize
-
-            let partIndex = splitIndex + 1 // part index starts from 1.
-            const totalUploads = Array.from(startIdx)
-
-            const headers = sourceObjList[splitIndex].getHeaders()
-
-            totalUploads.forEach((splitStart, upldCtrIdx) => {
-              let splitEnd = endIdx[upldCtrIdx]
-
-              const sourceObj = `${objConfig.Bucket}/${objConfig.Object}`
-              headers['x-amz-copy-source'] = `${sourceObj}`
-              headers['x-amz-copy-source-range'] = `bytes=${splitStart}-${splitEnd}`
-
-              const uploadPartConfig = {
-                bucketName: destObjConfig.Bucket,
-                objectName: destObjConfig.Object,
-                uploadID: uploadId,
-                partNumber: partIndex,
-                headers: headers,
-                sourceObj: sourceObj,
-              }
-
-              uploadPartConfigList.push(uploadPartConfig)
-              partIndex++ // without the increment, parts of a multi-part split would reuse the same part number
-            })
-          })
-
-          return uploadPartConfigList
-        }
-
-        const performUploadParts = (uploadId) => {
-          const uploadList = getUploadPartConfigList(uploadId)
-
-          async.map(uploadList, me.uploadPartCopy.bind(me), (err, res) => {
-            if (err) {
-              // abort the upload, then surface the original copy error to the caller
-              this.abortMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId).then(
-                () => cb(err),
-                (err) => cb(err),
-              )
-              return
-            }
-            const partsDone = res.map((partCopy) => ({ etag: partCopy.etag, part: partCopy.part }))
-            return me.completeMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId, partsDone, cb)
-          })
-        }
-
-        const newUploadHeaders = destObjConfig.getHeaders()
-
-        me.initiateNewMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, newUploadHeaders).then(
-          (uploadId) => {
-            performUploadParts(uploadId)
-          },
-          (err) => {
-            cb(err, null)
-          },
-        )
-      })
-      .catch((error) => {
-        cb(error, null)
-      })
-  }
-
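A compose sketch (illustrative source list, not part of the patch; assumes `client` and parts that satisfy the size constraints above):

    const sources = [
      new CopySourceOptions({ Bucket: 'my-bucket', Object: 'parts/part-1' }),
      new CopySourceOptions({ Bucket: 'my-bucket', Object: 'parts/part-2' }),
    ]
    const destination = new CopyDestinationOptions({ Bucket: 'my-bucket', Object: 'joined/full-object' })
    client.composeObject(destination, sources, (err, result) => {
      if (err) {
        return console.log(err)
      }
      console.log(result) // etag and versionId of the composed object
    })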
-  selectObjectContent(bucketName, objectName, selectOpts = {}, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
-    }
-    if (!isValidObjectName(objectName)) {
-      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
-    }
-    if (!_.isEmpty(selectOpts)) {
-      if (!isString(selectOpts.expression)) {
-        throw new TypeError('expression should be of type "string"')
-      }
-      if (!_.isEmpty(selectOpts.inputSerialization)) {
-        if (!isObject(selectOpts.inputSerialization)) {
-          throw new TypeError('inputSerialization should be of type "object"')
-        }
-      } else {
-        throw new TypeError('inputSerialization is required')
-      }
-      if (!_.isEmpty(selectOpts.outputSerialization)) {
-        if (!isObject(selectOpts.outputSerialization)) {
-          throw new TypeError('outputSerialization should be of type "object"')
-        }
-      } else {
-        throw new TypeError('outputSerialization is required')
-      }
-    } else {
-      throw new TypeError('valid select configuration is required')
-    }
-
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-
-    const method = 'POST'
-    let query = `select`
-    query += '&select-type=2'
-
-    const config = [
-      {
-        Expression: selectOpts.expression,
-      },
-      {
-        ExpressionType: selectOpts.expressionType || 'SQL',
-      },
-      {
-        InputSerialization: [selectOpts.inputSerialization],
-      },
-      {
-        OutputSerialization: [selectOpts.outputSerialization],
-      },
-    ]
-
-    // Optional
-    if (selectOpts.requestProgress) {
-      config.push({ RequestProgress: selectOpts.requestProgress })
-    }
-    // Optional
-    if (selectOpts.scanRange) {
-      config.push({ ScanRange: selectOpts.scanRange })
-    }
-
-    const builder = new xml2js.Builder({
-      rootName: 'SelectObjectContentRequest',
-      renderOpts: { pretty: false },
-      headless: true,
-    })
-    const payload = builder.buildObject(config)
-
-    this.makeRequest({ method, bucketName, objectName, query }, payload, [200], '', true, (e, response) => {
-      if (e) {
-        return cb(e)
-      }
-
-      let selectResult
-      pipesetup(response, transformers.selectObjectContentTransformer())
-        .on('data', (data) => {
-          selectResult = parseSelectObjectContentResponse(data)
-        })
-        .on('error', cb)
-        .on('end', () => {
-          cb(null, selectResult)
-        })
-    })
-  }
-}
-
-// Promisify various public-facing APIs on the Client module.
-Client.prototype.makeBucket = promisify(Client.prototype.makeBucket)
-Client.prototype.bucketExists = promisify(Client.prototype.bucketExists)
-
-Client.prototype.getObject = promisify(Client.prototype.getObject)
-Client.prototype.getPartialObject = promisify(Client.prototype.getPartialObject)
-Client.prototype.fGetObject = promisify(Client.prototype.fGetObject)
-Client.prototype.putObject = promisify(Client.prototype.putObject)
-Client.prototype.fPutObject = promisify(Client.prototype.fPutObject)
-Client.prototype.copyObject = promisify(Client.prototype.copyObject)
-Client.prototype.removeObjects = promisify(Client.prototype.removeObjects)
-
-Client.prototype.presignedUrl = promisify(Client.prototype.presignedUrl)
-Client.prototype.presignedGetObject = promisify(Client.prototype.presignedGetObject)
-Client.prototype.presignedPutObject = promisify(Client.prototype.presignedPutObject)
-Client.prototype.presignedPostPolicy = promisify(Client.prototype.presignedPostPolicy)
-Client.prototype.getBucketNotification = promisify(Client.prototype.getBucketNotification)
-Client.prototype.setBucketNotification = promisify(Client.prototype.setBucketNotification)
-Client.prototype.removeAllBucketNotification = promisify(Client.prototype.removeAllBucketNotification)
-Client.prototype.getBucketPolicy = promisify(Client.prototype.getBucketPolicy)
-Client.prototype.setBucketPolicy = promisify(Client.prototype.setBucketPolicy)
-Client.prototype.removeIncompleteUpload = promisify(Client.prototype.removeIncompleteUpload)
-Client.prototype.getBucketVersioning = promisify(Client.prototype.getBucketVersioning)
-Client.prototype.setBucketVersioning = promisify(Client.prototype.setBucketVersioning)
-Client.prototype.setBucketTagging = promisify(Client.prototype.setBucketTagging)
-Client.prototype.removeBucketTagging = promisify(Client.prototype.removeBucketTagging)
-Client.prototype.setObjectTagging = promisify(Client.prototype.setObjectTagging)
-Client.prototype.removeObjectTagging = promisify(Client.prototype.removeObjectTagging)
-Client.prototype.setBucketLifecycle = promisify(Client.prototype.setBucketLifecycle)
-Client.prototype.getBucketLifecycle = promisify(Client.prototype.getBucketLifecycle)
-Client.prototype.removeBucketLifecycle = promisify(Client.prototype.removeBucketLifecycle)
-Client.prototype.putObjectRetention = promisify(Client.prototype.putObjectRetention)
-Client.prototype.getObjectRetention = promisify(Client.prototype.getObjectRetention)
-Client.prototype.setBucketEncryption = promisify(Client.prototype.setBucketEncryption)
-Client.prototype.getBucketEncryption = promisify(Client.prototype.getBucketEncryption)
-Client.prototype.removeBucketEncryption = promisify(Client.prototype.removeBucketEncryption)
-Client.prototype.composeObject = promisify(Client.prototype.composeObject)
-Client.prototype.selectObjectContent = promisify(Client.prototype.selectObjectContent)
-
-// refactored APIs that use promises internally
-Client.prototype.removeObject = callbackify(Client.prototype.removeObject)
-Client.prototype.statObject = callbackify(Client.prototype.statObject)
-Client.prototype.removeBucket = callbackify(Client.prototype.removeBucket)
-Client.prototype.listBuckets = callbackify(Client.prototype.listBuckets)
-Client.prototype.removeBucketReplication = callbackify(Client.prototype.removeBucketReplication)
-Client.prototype.setBucketReplication = callbackify(Client.prototype.setBucketReplication)
-Client.prototype.getBucketReplication = callbackify(Client.prototype.getBucketReplication)
-Client.prototype.getObjectLegalHold = callbackify(Client.prototype.getObjectLegalHold)
-Client.prototype.setObjectLegalHold = callbackify(Client.prototype.setObjectLegalHold)
-Client.prototype.getBucketTagging = callbackify(Client.prototype.getBucketTagging)
-Client.prototype.getObjectTagging = callbackify(Client.prototype.getObjectTagging)
-Client.prototype.setObjectLockConfig = callbackify(Client.prototype.setObjectLockConfig)
-Client.prototype.getObjectLockConfig = callbackify(Client.prototype.getObjectLockConfig)
diff --git a/src/minio.ts b/src/minio.ts
new file mode 100644
index 00000000..7471a125
--- /dev/null
+++ b/src/minio.ts
@@ -0,0 +1,32 @@
+/*
+ * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +export { AssumeRoleProvider } from './AssumeRoleProvider.ts' +export { CredentialProvider } from './CredentialProvider.ts' +export { Credentials } from './Credentials.ts' +export { CopyDestinationOptions, CopySourceOptions, DEFAULT_REGION, ENCRYPTION_TYPES } from './helpers.ts' +export { CopyConditions } from './internal/copy-conditions.ts' +export { PostPolicy } from './internal/post-policy.ts' +export type { NotificationEvent, NotificationRecord } from './notification.ts' +export { + buildARN, + CloudFunctionConfig, + NotificationConfig, + NotificationPoller, + QueueConfig, + TopicConfig, +} from './notification.ts' +export { Client } from './typed-client2.ts' diff --git a/src/notification.js b/src/notification.js deleted file mode 100644 index 2c3b45c3..00000000 --- a/src/notification.js +++ /dev/null @@ -1,201 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2016 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { EventEmitter } from 'node:events' - -import { DEFAULT_REGION } from './helpers.ts' -import { pipesetup, uriEscape } from './internal/helper.ts' -import * as transformers from './transformers.js' - -// Notification config - array of target configs. -// Target configs can be -// 1. Topic (simple notification service) -// 2. Queue (simple queue service) -// 3. CloudFront (lambda function) -export class NotificationConfig { - add(target) { - let instance = '' - if (target instanceof TopicConfig) { - instance = 'TopicConfiguration' - } - if (target instanceof QueueConfig) { - instance = 'QueueConfiguration' - } - if (target instanceof CloudFunctionConfig) { - instance = 'CloudFunctionConfiguration' - } - if (!this[instance]) { - this[instance] = [] - } - this[instance].push(target) - } -} - -// Base class for three supported configs. -class TargetConfig { - setId(id) { - this.Id = id - } - addEvent(newevent) { - if (!this.Event) { - this.Event = [] - } - this.Event.push(newevent) - } - addFilterSuffix(suffix) { - if (!this.Filter) { - this.Filter = { S3Key: { FilterRule: [] } } - } - this.Filter.S3Key.FilterRule.push({ Name: 'suffix', Value: suffix }) - } - addFilterPrefix(prefix) { - if (!this.Filter) { - this.Filter = { S3Key: { FilterRule: [] } } - } - this.Filter.S3Key.FilterRule.push({ Name: 'prefix', Value: prefix }) - } -} - -// 1. Topic (simple notification service) -export class TopicConfig extends TargetConfig { - constructor(arn) { - super() - this.Topic = arn - } -} - -// 2. Queue (simple queue service) -export class QueueConfig extends TargetConfig { - constructor(arn) { - super() - this.Queue = arn - } -} - -// 3. 
CloudFront (lambda function) -export class CloudFunctionConfig extends TargetConfig { - constructor(arn) { - super() - this.CloudFunction = arn - } -} - -export const buildARN = (partition, service, region, accountId, resource) => { - return 'arn:' + partition + ':' + service + ':' + region + ':' + accountId + ':' + resource -} - -export const ObjectCreatedAll = 's3:ObjectCreated:*' -export const ObjectCreatedPut = 's3:ObjectCreated:Put' -export const ObjectCreatedPost = 's3:ObjectCreated:Post' -export const ObjectCreatedCopy = 's3:ObjectCreated:Copy' -export const ObjectCreatedCompleteMultipartUpload = 's3:ObjectCreated:CompleteMultipartUpload' -export const ObjectRemovedAll = 's3:ObjectRemoved:*' -export const ObjectRemovedDelete = 's3:ObjectRemoved:Delete' -export const ObjectRemovedDeleteMarkerCreated = 's3:ObjectRemoved:DeleteMarkerCreated' -export const ObjectReducedRedundancyLostObject = 's3:ReducedRedundancyLostObject' - -// Poll for notifications, used in #listenBucketNotification. -// Listening constitutes repeatedly requesting s3 whether or not any -// changes have occurred. -export class NotificationPoller extends EventEmitter { - constructor(client, bucketName, prefix, suffix, events) { - super() - - this.client = client - this.bucketName = bucketName - this.prefix = prefix - this.suffix = suffix - this.events = events - - this.ending = false - } - - // Starts the polling. - start() { - this.ending = false - - process.nextTick(() => { - this.checkForChanges() - }) - } - - // Stops the polling. - stop() { - this.ending = true - } - - checkForChanges() { - // Don't continue if we're looping again but are cancelled. - if (this.ending) { - return - } - - let method = 'GET' - var queries = [] - if (this.prefix) { - var prefix = uriEscape(this.prefix) - queries.push(`prefix=${prefix}`) - } - if (this.suffix) { - var suffix = uriEscape(this.suffix) - queries.push(`suffix=${suffix}`) - } - if (this.events) { - this.events.forEach((s3event) => queries.push('events=' + uriEscape(s3event))) - } - queries.sort() - - var query = '' - if (queries.length > 0) { - query = `${queries.join('&')}` - } - const region = this.client.region || DEFAULT_REGION - this.client.makeRequest({ method, bucketName: this.bucketName, query }, '', [200], region, true, (e, response) => { - if (e) { - return this.emit('error', e) - } - - let transformer = transformers.getNotificationTransformer() - pipesetup(response, transformer) - .on('data', (result) => { - // Data is flushed periodically (every 5 seconds), so we should - // handle it after flushing from the JSON parser. - let records = result.Records - // If null (= no records), change to an empty array. - if (!records) { - records = [] - } - - // Iterate over the notifications and emit them individually. - records.forEach((record) => { - this.emit('notification', record) - }) - - // If we're done, stop. - if (this.ending) { - response.destroy() - } - }) - .on('error', (e) => this.emit('error', e)) - .on('end', () => { - // Do it again, if we haven't cancelled yet. - process.nextTick(() => { - this.checkForChanges() - }) - }) - }) - } -} diff --git a/src/notification.ts b/src/notification.ts new file mode 100644 index 00000000..325b765d --- /dev/null +++ b/src/notification.ts @@ -0,0 +1,266 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2016 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { EventEmitter } from 'eventemitter3' +import jsonLineParser from 'stream-json/jsonl/Parser.js' + +import { DEFAULT_REGION } from './helpers.ts' +import { pipesetup, uriEscape } from './internal/helper.ts' +import type { TypedBase } from './typedBase.ts' + +// TODO: type this + +type Event = unknown + +// Base class for three supported configs. +export class TargetConfig { + private Filter?: { S3Key: { FilterRule: { Name: string; Value: string }[] } } + private Event?: Event[] + private Id: any + + setId(id: any) { + this.Id = id + } + + addEvent(newevent: Event) { + if (!this.Event) { + this.Event = [] + } + this.Event.push(newevent) + } + + addFilterSuffix(suffix: string) { + if (!this.Filter) { + this.Filter = { S3Key: { FilterRule: [] } } + } + this.Filter.S3Key.FilterRule.push({ Name: 'suffix', Value: suffix }) + } + + addFilterPrefix(prefix: string) { + if (!this.Filter) { + this.Filter = { S3Key: { FilterRule: [] } } + } + this.Filter.S3Key.FilterRule.push({ Name: 'prefix', Value: prefix }) + } +} + +// 1. Topic (simple notification service) +export class TopicConfig extends TargetConfig { + private Topic: string + + constructor(arn: string) { + super() + this.Topic = arn + } +} + +// 2. Queue (simple queue service) +export class QueueConfig extends TargetConfig { + private Queue: string + + constructor(arn: string) { + super() + this.Queue = arn + } +} + +// 3. CloudFront (lambda function) +export class CloudFunctionConfig extends TargetConfig { + private CloudFunction: string + + constructor(arn: string) { + super() + this.CloudFunction = arn + } +} + +// Notification config - array of target configs. +// Target configs can be +// 1. Topic (simple notification service) +// 2. Queue (simple queue service) +// 3. 
CloudFront (lambda function) +export class NotificationConfig { + private TopicConfiguration?: TargetConfig[] + private CloudFunctionConfiguration?: TargetConfig[] + private QueueConfiguration?: TargetConfig[] + + add(target: TargetConfig) { + let instance: TargetConfig[] | undefined + if (target instanceof TopicConfig) { + instance = this.TopicConfiguration ??= [] + } + if (target instanceof QueueConfig) { + instance = this.QueueConfiguration ??= [] + } + if (target instanceof CloudFunctionConfig) { + instance = this.CloudFunctionConfiguration ??= [] + } + if (instance) { + instance.push(target) + } + } +} + +export const buildARN = (partition: string, service: string, region: string, accountId: string, resource: string) => { + return 'arn:' + partition + ':' + service + ':' + region + ':' + accountId + ':' + resource +} +export const ObjectCreatedAll = 's3:ObjectCreated:*' +export const ObjectCreatedPut = 's3:ObjectCreated:Put' +export const ObjectCreatedPost = 's3:ObjectCreated:Post' +export const ObjectCreatedCopy = 's3:ObjectCreated:Copy' +export const ObjectCreatedCompleteMultipartUpload = 's3:ObjectCreated:CompleteMultipartUpload' +export const ObjectRemovedAll = 's3:ObjectRemoved:*' +export const ObjectRemovedDelete = 's3:ObjectRemoved:Delete' +export const ObjectRemovedDeleteMarkerCreated = 's3:ObjectRemoved:DeleteMarkerCreated' +export const ObjectReducedRedundancyLostObject = 's3:ReducedRedundancyLostObject' +export type NotificationEvent = + | 's3:ObjectCreated:*' + | 's3:ObjectCreated:Put' + | 's3:ObjectCreated:Post' + | 's3:ObjectCreated:Copy' + | 's3:ObjectCreated:CompleteMultipartUpload' + | 's3:ObjectRemoved:*' + | 's3:ObjectRemoved:Delete' + | 's3:ObjectRemoved:DeleteMarkerCreated' + | 's3:ReducedRedundancyLostObject' + | 's3:TestEvent' + | 's3:ObjectRestore:Post' + | 's3:ObjectRestore:Completed' + | 's3:Replication:OperationFailedReplication' + | 's3:Replication:OperationMissedThreshold' + | 's3:Replication:OperationReplicatedAfterThreshold' + | 's3:Replication:OperationNotTracked' + | string // put string at least so auto-complete could work + +// TODO: type this +export type NotificationRecord = unknown +// Poll for notifications, used in #listenBucketNotification. +// Listening constitutes repeatedly requesting s3 whether or not any +// changes have occurred. +export class NotificationPoller extends EventEmitter<{ + notification: (event: NotificationRecord) => void + error: (error: unknown) => void +}> { + private client: TypedBase + private bucketName: string + private prefix: string + private suffix: string + private events: NotificationEvent[] + private ending: boolean + + constructor(client: TypedBase, bucketName: string, prefix: string, suffix: string, events: NotificationEvent[]) { + super() + + this.client = client + this.bucketName = bucketName + this.prefix = prefix + this.suffix = suffix + this.events = events + + this.ending = false + } + + // Starts the polling. + start() { + this.ending = false + + process.nextTick(() => { + this.checkForChanges() + }) + } + + // Stops the polling. + stop() { + this.ending = true + } + + checkForChanges() { + // Don't continue if we're looping again but are cancelled. 
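For orientation, a minimal sketch of how the target-config classes defined above fit together; the ARN pieces and the Id are sample values, not taken from this diff:

```ts
import { buildARN, NotificationConfig, ObjectCreatedAll, TopicConfig } from './notification.ts'

const arn = buildARN('aws', 'sns', 'us-east-1', '123456789012', 'object-events')

const topic = new TopicConfig(arn)
topic.setId('created-images') // hypothetical Id
topic.addEvent(ObjectCreatedAll)
topic.addFilterPrefix('photos/')
topic.addFilterSuffix('.jpg')

const config = new NotificationConfig()
config.add(topic) // the instanceof checks in add() route this into TopicConfiguration
```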
+ if (this.ending) { + return + } + + const method = 'GET' + const queries = [] + if (this.prefix) { + const prefix = uriEscape(this.prefix) + queries.push(`prefix=${prefix}`) + } + if (this.suffix) { + const suffix = uriEscape(this.suffix) + queries.push(`suffix=${suffix}`) + } + if (this.events) { + this.events.forEach((s3event) => queries.push('events=' + uriEscape(s3event))) + } + queries.sort() + + let query = '' + if (queries.length > 0) { + query = `${queries.join('&')}` + } + const region = this.client.region || DEFAULT_REGION + + this.client + .makeRequestAsync( + { + method, + bucketName: this.bucketName, + query, + }, + '', + [200], + region, + true, + ) + .then( + (response) => { + const asm = jsonLineParser.make() + + pipesetup(response, asm) + .on('data', (data) => { + // Data is flushed periodically (every 5 seconds), so we should + // handle it after flushing from the JSON parser. + let records = data.value.Records + // If null (= no records), change to an empty array. + if (!records) { + records = [] + } + + // Iterate over the notifications and emit them individually. + records.forEach((record: NotificationRecord) => { + this.emit('notification', record) + }) + + // If we're done, stop. + if (this.ending) { + response?.destroy() + } + }) + .on('error', (e) => this.emit('error', e)) + .on('end', () => { + // Do it again, if we haven't cancelled yet. + process.nextTick(() => { + this.checkForChanges() + }) + }) + }, + (e) => { + return this.emit('error', e) + }, + ) + } +} diff --git a/src/object-uploader.js b/src/object-uploader.js deleted file mode 100644 index c31466f2..00000000 --- a/src/object-uploader.js +++ /dev/null @@ -1,289 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2016 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import * as Crypto from 'node:crypto' -import { Transform } from 'node:stream' - -import * as querystring from 'query-string' - -import { getVersionId, sanitizeETag } from './internal/helper.ts' - -// We extend Transform because Writable does not implement ._flush(). -export class ObjectUploader extends Transform { - constructor(client, bucketName, objectName, partSize, metaData, callback) { - super() - this.emptyStream = true - this.client = client - this.bucketName = bucketName - this.objectName = objectName - // The size of each multipart, chunked by BlockStream2. - this.partSize = partSize - // This is the metadata for the object. - this.metaData = metaData - - // Call like: callback(error, {etag, versionId}). - this.callback = callback - - // We need to keep track of what number chunk/part we're on. This increments - // each time _write() is called. Starts with 1, not 0. - this.partNumber = 1 - - // A list of the previously uploaded chunks, for resuming a file upload. This - // will be null if we aren't resuming an upload. - this.oldParts = null - - // Keep track of the etags for aggregating the chunks together later. Each - // etag represents a single chunk of the file. 
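The rewritten poller above swaps the json-stream based `getNotificationTransformer` for stream-json's line-delimited parser. That parser emits `{ key, value }` tokens, one per input line, which is why the handler reads `data.value.Records` rather than `data.Records`. A standalone sketch of the parser's output shape, with sample input:

```ts
import jsonLineParser from 'stream-json/jsonl/Parser.js'

// Each input line is one JSON document; `key` is the zero-based line index.
const asm = jsonLineParser.make()
asm.on('data', (data) => {
  const records = data.value.Records ?? [] // same null-to-empty normalization as the poller
  console.log(records)
})
asm.write('{"Records":[{"eventName":"s3:ObjectCreated:Put"}]}\n')
asm.end()
```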
- this.etags = [] - - // This is for the multipart upload request — if null, we're either not initiated - // yet or we're flushing in one packet. - this.id = null - - // Handle errors. - this.on('error', (err) => { - callback(err) - }) - } - - _transform(chunk, encoding, callback) { - this.emptyStream = false - let method = 'PUT' - let headers = { 'Content-Length': chunk.length } - let md5digest = '' - - // Calculate and set Content-MD5 header if SHA256 is not set. - // This will happen only when there is a secure connection to the s3 server. - if (!this.client.enableSHA256) { - md5digest = Crypto.createHash('md5').update(chunk).digest() - headers['Content-MD5'] = md5digest.toString('base64') - } - // We can flush the object in one packet if it fits in one chunk. This is true - // if the chunk size is smaller than the part size, signifying the end of the - // stream. - if (this.partNumber == 1 && chunk.length < this.partSize) { - // PUT the chunk in a single request — use an empty query. - let options = { - method, - // Set user metadata as this is not a multipart upload - headers: Object.assign({}, this.metaData, headers), - query: '', - bucketName: this.bucketName, - objectName: this.objectName, - } - - this.client.makeRequest(options, chunk, [200], '', true, (err, response) => { - if (err) { - return callback(err) - } - let result = { - etag: sanitizeETag(response.headers.etag), - versionId: getVersionId(response.headers), - } - // Ignore the 'data' event so that the stream closes. (nodejs stream requirement) - response.on('data', () => {}) - - // Give the etag back, we're done! - - process.nextTick(() => { - this.callback(null, result) - }) - - // Because we're sure the stream has ended, allow it to flush and end. - callback() - }) - - return - } - - // If we aren't flushing in one packet, we need to initiate the multipart upload, - // if it hasn't already been done. The write will be buffered until the upload has been - // initiated. - if (this.id === null) { - this.once('ready', () => { - this._transform(chunk, encoding, callback) - }) - - // Check for an incomplete previous upload. - this.client.findUploadId(this.bucketName, this.objectName, (err, id) => { - if (err) { - return this.emit('error', err) - } - - // If no upload ID exists, initiate a new one. - if (!id) { - this.client.initiateNewMultipartUpload(this.bucketName, this.objectName, this.metaData).then( - (id) => { - this.id = id - - // We are now ready to accept new chunks — this will flush the buffered chunk. - this.emit('ready') - }, - (err) => callback(err), - ) - - return - } - - this.id = id - - // Retrieve the pre-uploaded parts, if we need to resume the upload. - this.client.listParts(this.bucketName, this.objectName, id).then( - (etags) => { - // It is possible for no parts to be already uploaded. - if (!etags) { - etags = [] - } - - // oldParts will become an object, allowing oldParts[partNumber].etag - this.oldParts = etags.reduce(function (prev, item) { - if (!prev[item.part]) { - prev[item.part] = item - } - return prev - }, {}) - - this.emit('ready') - }, - (err) => { - return this.emit('error', err) - }, - ) - }) - - return - } - - // Continue uploading various parts if we have initiated multipart upload. - let partNumber = this.partNumber++ - - // Check to see if we've already uploaded this chunk. If the hash sums match, - // we can skip to the next chunk. - if (this.oldParts) { - let oldPart = this.oldParts[partNumber] - - // Calulcate the md5 hash, if it has not already been calculated. 
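The first branch of `_transform` above relies on block-stream2 emitting full `partSize` chunks until the final, shorter one, so a first chunk smaller than `partSize` proves the whole object fits in a single request. That decision, isolated as a hypothetical sketch with a sample part size:

```ts
// Standalone version of the single-shot check above; 64 MiB is a sample value.
const partSize = 64 * 1024 * 1024

function fitsInOnePut(partNumber: number, chunkLength: number): boolean {
  // Only the very first chunk can prove the stream has already ended.
  return partNumber === 1 && chunkLength < partSize
}

fitsInOnePut(1, 5 * 1024) // true: one plain PUT, no multipart upload is initiated
fitsInOnePut(2, 5 * 1024) // false: past part 1, the multipart path is already in use
```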
- if (!md5digest) { - md5digest = Crypto.createHash('md5').update(chunk).digest() - } - - if (oldPart && md5digest.toString('hex') === oldPart.etag) { - // The md5 matches, the chunk has already been uploaded. - this.etags.push({ part: partNumber, etag: oldPart.etag }) - - callback() - return - } - } - - // Write the chunk with an uploader. - let query = querystring.stringify({ - partNumber: partNumber, - uploadId: this.id, - }) - - let options = { - method, - query, - headers, - bucketName: this.bucketName, - objectName: this.objectName, - } - - this.client.makeRequest(options, chunk, [200], '', true, (err, response) => { - if (err) { - return callback(err) - } - - // In order to aggregate the parts together, we need to collect the etags. - let etag = response.headers.etag - if (etag) { - etag = etag.replace(/^"/, '').replace(/"$/, '') - } - - this.etags.push({ part: partNumber, etag }) - - // Ignore the 'data' event so that the stream closes. (nodejs stream requirement) - response.on('data', () => {}) - - // We're ready for the next chunk. - callback() - }) - } - - _flush(callback) { - if (this.emptyStream) { - let method = 'PUT' - let headers = Object.assign({}, this.metaData, { 'Content-Length': 0 }) - let options = { - method, - headers, - query: '', - bucketName: this.bucketName, - objectName: this.objectName, - } - - this.client.makeRequest(options, '', [200], '', true, (err, response) => { - if (err) { - return callback(err) - } - - let result = { - etag: sanitizeETag(response.headers.etag), - versionId: getVersionId(response.headers), - } - - // Ignore the 'data' event so that the stream closes. (nodejs stream requirement) - response.on('data', () => {}) - - // Give the etag back, we're done! - process.nextTick(() => { - this.callback(null, result) - }) - - // Because we're sure the stream has ended, allow it to flush and end. - callback() - }) - - return - } - // If it has been uploaded in a single packet, we don't have to do anything. - if (this.id === null) { - return - } - - // This is called when all of the chunks uploaded successfully, thus - // completing the multipart upload. - this.client.completeMultipartUpload(this.bucketName, this.objectName, this.id, this.etags, (err, etag) => { - if (err) { - return callback(err) - } - - // Call our callback on the next tick to allow the streams infrastructure - // to finish what its doing before we continue. - process.nextTick(() => { - this.callback(null, etag) - }) - - callback() - }) - } -} - -// deprecated default export, please use named exports. -// keep for backward compatibility. -// eslint-disable-next-line import/no-default-export -export default ObjectUploader diff --git a/src/promisify.js b/src/promisify.js deleted file mode 100644 index 1f68464a..00000000 --- a/src/promisify.js +++ /dev/null @@ -1,31 +0,0 @@ -// Returns a wrapper function that will promisify a given callback function. -// It will preserve 'this'. -export function promisify(fn) { - return function () { - // If the last argument is a function, assume its the callback. - let callback = arguments[arguments.length - 1] - - // If the callback is given, don't promisify, just pass straight in. - if (typeof callback === 'function') { - return fn.apply(this, arguments) - } - - // Otherwise, create a new set of arguments, and wrap - // it in a promise. - let args = [...arguments] - - return new Promise((resolve, reject) => { - // Add the callback function. 
- args.push((err, value) => { - if (err) { - return reject(err) - } - - resolve(value) - }) - - // Call the function with our special adaptor callback added. - fn.apply(this, args) - }) - } -} diff --git a/src/transformers.js b/src/transformers.js deleted file mode 100644 index b12f27eb..00000000 --- a/src/transformers.js +++ /dev/null @@ -1,168 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015, 2016 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import * as Crypto from 'node:crypto' - -import JSONParser from 'json-stream' -import Through2 from 'through2' - -import { isFunction } from './internal/helper.ts' -import * as xmlParsers from './xml-parsers.js' - -// getConcater returns a stream that concatenates the input and emits -// the concatenated output when 'end' has reached. If an optional -// parser function is passed upon reaching the 'end' of the stream, -// `parser(concatenated_data)` will be emitted. -export function getConcater(parser, emitError) { - var objectMode = false - var bufs = [] - - if (parser && !isFunction(parser)) { - throw new TypeError('parser should be of type "function"') - } - - if (parser) { - objectMode = true - } - - return Through2( - { objectMode }, - function (chunk, enc, cb) { - bufs.push(chunk) - cb() - }, - function (cb) { - if (emitError) { - cb(parser(Buffer.concat(bufs).toString())) - // cb(e) would mean we have to emit 'end' by explicitly calling this.push(null) - this.push(null) - return - } - if (bufs.length) { - if (parser) { - this.push(parser(Buffer.concat(bufs).toString())) - } else { - this.push(Buffer.concat(bufs)) - } - } - cb() - }, - ) -} - -// A through stream that calculates md5sum and sha256sum -export function getHashSummer(enableSHA256) { - var md5 = Crypto.createHash('md5') - var sha256 = Crypto.createHash('sha256') - - return Through2.obj( - function (chunk, enc, cb) { - if (enableSHA256) { - sha256.update(chunk) - } else { - md5.update(chunk) - } - cb() - }, - function (cb) { - var md5sum = '' - var sha256sum = '' - if (enableSHA256) { - sha256sum = sha256.digest('hex') - } else { - md5sum = md5.digest('base64') - } - var hashData = { md5sum, sha256sum } - this.push(hashData) - this.push(null) - cb() - }, - ) -} - -// Following functions return a stream object that parses XML -// and emits suitable Javascript objects. - -// Parses CopyObject response. -export function getCopyObjectTransformer() { - return getConcater(xmlParsers.parseCopyObject) -} - -// Parses listMultipartUploads response. -export function getListMultipartTransformer() { - return getConcater(xmlParsers.parseListMultipart) -} - -// Parses listObjects response. -export function getListObjectsTransformer() { - return getConcater(xmlParsers.parseListObjects) -} - -// Parses listObjects response. -export function getListObjectsV2Transformer() { - return getConcater(xmlParsers.parseListObjectsV2) -} - -// Parses listObjects with metadata response. 
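The deleted `promisify` helper above is the inverse of Node's `util.promisify`: it keeps one function usable in both styles by branching on whether the last argument is a function. A self-contained sketch of that pattern, reduced to a single function:

```ts
function dualMode(cb: (err: Error | null, value?: number) => void): void
function dualMode(): Promise<number>
function dualMode(cb?: (err: Error | null, value?: number) => void): Promise<number> | void {
  if (typeof cb === 'function') {
    // Last argument is a callback: call through and return nothing.
    setImmediate(() => cb(null, 42))
    return
  }
  // No callback given: hand back a promise instead.
  return new Promise((resolve) => setImmediate(() => resolve(42)))
}

dualMode((err, value) => console.log(err ?? value)) // classic callback style
dualMode().then((value) => console.log(value)) // promise style, same function
```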
-export function getListObjectsV2WithMetadataTransformer() { - return getConcater(xmlParsers.parseListObjectsV2WithMetadata) -} - -// Parses completeMultipartUpload response. -export function getCompleteMultipartTransformer() { - return getConcater(xmlParsers.parseCompleteMultipart) -} - -// Parses GET/SET BucketNotification response -export function getBucketNotificationTransformer() { - return getConcater(xmlParsers.parseBucketNotification) -} - -// Parses a notification. -export function getNotificationTransformer() { - // This will parse and return each object. - return new JSONParser() -} - -export function bucketVersioningTransformer() { - return getConcater(xmlParsers.parseBucketVersioningConfig) -} - -export function lifecycleTransformer() { - return getConcater(xmlParsers.parseLifecycleConfig) -} - -export function objectRetentionTransformer() { - return getConcater(xmlParsers.parseObjectRetentionConfig) -} -export function bucketEncryptionTransformer() { - return getConcater(xmlParsers.parseBucketEncryptionConfig) -} - -export function objectLegalHoldTransformer() { - return getConcater(xmlParsers.parseObjectLegalHoldConfig) -} - -export function uploadPartTransformer() { - return getConcater(xmlParsers.uploadPartParser) -} -export function selectObjectContentTransformer() { - return getConcater() -} - -export function removeObjectsTransformer() { - return getConcater(xmlParsers.removeObjectsParser) -} diff --git a/src/transformers.ts b/src/transformers.ts new file mode 100644 index 00000000..4ba44460 --- /dev/null +++ b/src/transformers.ts @@ -0,0 +1,161 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015, 2016 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import * as crypto from 'node:crypto' +import type { ServerResponse } from 'node:http' +import type * as stream from 'node:stream' + +import Through2 from 'through2' + +import * as errors from './errors.ts' +import { isFunction } from './internal/helper.ts' +import * as xmlParsers from './xml-parsers.ts' + +// getConcater returns a stream that concatenates the input and emits +// the concatenated output when 'end' has reached. If an optional +// parser function is passed upon reaching the 'end' of the stream, +// `parser(concatenated_data)` will be emitted. 
+export function getConcater(parser?: undefined | ((xml: string) => any), emitError?: boolean): stream.Transform { + let objectMode = false + const bufs: Buffer[] = [] + + if (parser && !isFunction(parser)) { + throw new TypeError('parser should be of type "function"') + } + + if (parser) { + objectMode = true + } + + return Through2( + { objectMode }, + function (chunk, enc, cb) { + bufs.push(chunk) + cb() + }, + function (cb) { + if (emitError) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + cb(parser(Buffer.concat(bufs).toString())) + // cb(e) would mean we have to emit 'end' by explicitly calling this.push(null) + this.push(null) + return + } + if (bufs.length) { + if (parser) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + this.push(parser(Buffer.concat(bufs).toString())) + } else { + this.push(Buffer.concat(bufs)) + } + } + cb() + }, + ) +} + +// Generates an Error object depending on http statusCode and XML body +export function getErrorTransformer(response: ServerResponse) { + const statusCode = response.statusCode + let code: string, message: string + if (statusCode === 301) { + code = 'MovedPermanently' + message = 'Moved Permanently' + } else if (statusCode === 307) { + code = 'TemporaryRedirect' + message = 'Are you using the correct endpoint URL?' + } else if (statusCode === 403) { + code = 'AccessDenied' + message = 'Valid and authorized credentials required' + } else if (statusCode === 404) { + code = 'NotFound' + message = 'Not Found' + } else if (statusCode === 405) { + code = 'MethodNotAllowed' + message = 'Method Not Allowed' + } else if (statusCode === 501) { + code = 'MethodNotAllowed' + message = 'Method Not Allowed' + } else { + code = 'UnknownError' + message = `${statusCode}` + } + + const headerInfo: Record = {} + // A value created by S3 compatible server that uniquely identifies the request. + headerInfo.amzRequestid = response.headersSent ? (response.getHeader('x-amz-request-id') as string | undefined) : null + // A special token that helps troubleshoot API replies and issues. + headerInfo.amzId2 = response.headersSent ? (response.getHeader('x-amz-id-2') as string | undefined) : null + // Region where the bucket is located. This header is returned only + // in HEAD bucket and ListObjects response. + headerInfo.amzBucketRegion = response.headersSent + ? (response.getHeader('x-amz-bucket-region') as string | undefined) + : null + + return getConcater((xmlString) => { + const getError = () => { + // Message should be instantiated for each S3Errors. + const e = new errors.S3Error(message, { cause: headerInfo }) + // S3 Error code. + e.code = code + Object.entries(headerInfo).forEach(([key, value]) => { + // @ts-expect-error force set error properties + e[key] = value + }) + return e + } + if (!xmlString) { + return getError() + } + let e + try { + e = xmlParsers.parseError(xmlString, headerInfo) + } catch (ex) { + return getError() + } + return e + }, true) +} + +export function hashBinary(buf: Buffer, enableSHA256: boolean) { + let sha256sum = '' + if (enableSHA256) { + sha256sum = crypto.createHash('sha256').update(buf).digest('hex') + } + const md5sum = crypto.createHash('md5').update(buf).digest('base64') + + return { md5sum, sha256sum } +} + +// Following functions return a stream object that parses XML +// and emits suitable Javascript objects. + +// Parses listMultipartUploads response. 
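`getConcater` above is the building block for every response transformer: it buffers the whole stream, then either re-emits the raw Buffer or, when a parser is supplied, switches to object mode and pushes the single parsed value (with `emitError`, the parsed value becomes the stream error instead). A usage sketch with a stand-in parser and a hypothetical response stream:

```ts
import type * as stream from 'node:stream'

declare const response: stream.Readable // hypothetical HTTP response body

const transformer = getConcater((xml: string) => ({ length: xml.length })) // stand-in parser
response
  .pipe(transformer)
  .on('data', (parsed) => console.log(parsed)) // exactly one object at end-of-stream
  .on('error', (err) => console.error(err))
```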
+export function getListMultipartTransformer() { + return getConcater(xmlParsers.parseListMultipart) +} + +// Parses listObjects response. +export function getListObjectsV2Transformer() { + return getConcater(xmlParsers.parseListObjectsV2) +} + +// Parses listObjects with metadata response. +export function getListObjectsV2WithMetadataTransformer() { + return getConcater(xmlParsers.parseListObjectsV2WithMetadata) +} diff --git a/src/typed-client.ts b/src/typed-client.ts new file mode 100644 index 00000000..e1e85802 --- /dev/null +++ b/src/typed-client.ts @@ -0,0 +1,1707 @@ +import * as stream from 'node:stream' + +import queryString from 'query-string' +import { TextEncoder } from 'web-encoding' +import xml2js from 'xml2js' + +import * as errors from './errors.ts' +import type { SelectResults } from './helpers.ts' +import { LEGAL_HOLD_STATUS, RETENTION_MODES } from './helpers.ts' +import { asCallback, asCallbackFn } from './internal/as-callback.ts' +import { fsp } from './internal/async.ts' +import { + getScope, + insertContentType, + isBoolean, + isEmpty, + isFunction, + isNumber, + isObject, + isOptionalFunction, + isString, + isValidBucketName, + isValidDate, + isValidObjectName, + isValidPrefix, + makeDateLong, + prependXAMZMeta, + toMd5, + uriEscape, +} from './internal/helper.ts' +import { PostPolicy } from './internal/post-policy.ts' +import { readAsBuffer } from './internal/response.ts' +import type { + BucketStream, + EncryptionConfig, + LegalHoldOptions, + Lifecycle, + ListObjectV1Opt, + NoResultCallback, + ObjectMetaData, + PostPolicyResult, + RemoveOptions, + RequestHeaders, + ResultCallback, + Retention, + SelectOptions, + Tag, + TagList, + UploadedObjectInfo, + VersionConfigInput, + VersionIdentification, + VersioningConfig, +} from './internal/type.ts' +import { postPresignSignatureV4, presignSignatureV4 } from './signing.ts' +import * as transformers from './transformers.ts' +import type { RequestMethod, RequestOption } from './typedBase.ts' +import { findCallback, TypedBase } from './typedBase.ts' +import type { S3ListObject } from './xml-parsers.ts' +import * as xmlParsers from './xml-parsers.ts' +import { parseSelectObjectContentResponse } from './xml-parsers.ts' + +export class TypedClient extends TypedBase { + getBucketVersioning(bucketName: string, callback: ResultCallback): void + getBucketVersioning(bucketName: string): Promise + + getBucketVersioning(bucketName: string, cb?: ResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + + const method = 'GET' + const query = 'versioning' + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, query }) + const body = await readAsBuffer(res) + return xmlParsers.parseBucketVersioningConfig(body.toString()) + }) + } + + setBucketVersioning(bucketName: string, versioningConfig: VersionConfigInput, callback: NoResultCallback): void + setBucketVersioning(bucketName: string, versioningConfig: VersionConfigInput): Promise + setBucketVersioning( + bucketName: string, + versionConfig: VersionConfigInput, + cb?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!Object.keys(versionConfig).length) { + throw new errors.InvalidArgumentError('versionConfig should be of type "object"') + } + + const method = 'PUT' + const query = 'versioning' + const builder = new 
xml2js.Builder({ + rootName: 'VersioningConfiguration', + renderOpts: { pretty: false }, + headless: true, + }) + const payload = builder.buildObject(versionConfig) + + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit({ method, bucketName, query }, payload) + }) + } + + /** + * Set the policy on a bucket or an object prefix. + * + * @param bucketName - name of the bucket + * @param bucketPolicy - bucket policy (JSON stringify'ed) + */ + setBucketPolicy(bucketName: string, bucketPolicy: string): Promise + setBucketPolicy(bucketName: string, bucketPolicy: string, callback: NoResultCallback): void + + setBucketPolicy(bucketName: string, policy: string, cb?: NoResultCallback): void | Promise { + // Validate arguments. + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) + } + if (!isString(policy)) { + throw new errors.InvalidBucketPolicyError(`Invalid bucket policy: ${policy} - must be "string"`) + } + + let method: RequestMethod = 'DELETE' + const query = 'policy' + + if (policy) { + method = 'PUT' + } + + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit( + { + method, + bucketName, + query, + }, + policy, + [204], + '', + ) + }) + } + + /** + * Set the policy on a bucket or an object prefix. + */ + getBucketPolicy(bucketName: string, callback: ResultCallback): void + getBucketPolicy(bucketName: string): Promise + + getBucketPolicy(bucketName: string, cb?: ResultCallback): void | Promise { + // Validate arguments. + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) + } + + const method = 'GET' + const query = 'policy' + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, query }, '', [200], '') + const body = await readAsBuffer(res) + return body.toString() + }) + } + + /** + * Get Tags associated with a Bucket + */ + getBucketTagging(bucketName: string, callback: ResultCallback): void + getBucketTagging(bucketName: string): Promise + + getBucketTagging(bucketName: string, cb?: ResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) + } + + const method = 'GET' + const query = 'tagging' + const requestOptions: RequestOption = { method, bucketName, query } + + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync(requestOptions) + const body = await readAsBuffer(res) + return xmlParsers.parseTagging(body.toString()) + }) + } + + /** Remove Tags on an Bucket/Object based on params + * __Arguments__ + * bucketName _string_ + * objectName _string_ (optional) + * removeOpts _object_ (Optional) e.g {versionId:"my-object-version-id"}, + * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. 
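The bucket-level methods above all follow the same shape: validate arguments, build the request, and round-trip XML (or a raw JSON string, in the policy case). For orientation, their promise-style call sites as a sketch; the bucket name and policy document are sample data:

```ts
import { TypedClient } from './typed-client.ts'

declare const client: TypedClient // assume a configured client

await client.setBucketVersioning('my-bucket', { Status: 'Enabled' })
const versioning = await client.getBucketVersioning('my-bucket')

// setBucketPolicy takes a JSON string; an empty string turns the request into a DELETE.
await client.setBucketPolicy(
  'my-bucket',
  JSON.stringify({
    Version: '2012-10-17',
    Statement: [{ Effect: 'Allow', Principal: '*', Action: ['s3:GetObject'], Resource: ['arn:aws:s3:::my-bucket/*'] }],
  }),
)

const tags = await client.getBucketTagging('my-bucket')
```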
+ */ + protected async removeTagging({ + bucketName, + objectName, + removeOpts, + }: { + removeOpts?: { versionId?: string } + bucketName: string + objectName?: string + }) { + const method = 'DELETE' + let query = 'tagging' + + if (removeOpts && removeOpts.versionId) { + query = `${query}&versionId=${removeOpts.versionId}` + } + const requestOptions: RequestOption = { method, bucketName, objectName, query } + + if (objectName) { + requestOptions['objectName'] = objectName + } + + await this.makeRequestAsync(requestOptions, '', [200, 204], '') + } + + /** + * Remove Tags associated with a bucket + */ + removeBucketTagging(bucketName: string, callback: NoResultCallback): void + removeBucketTagging(bucketName: string): Promise + + /** Remove Tags associated with a bucket + * __Arguments__ + * bucketName _string_ + * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful. + */ + removeBucketTagging(bucketName: string, cb?: NoResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + + return asCallback(cb, this.removeTagging({ bucketName })) + } + + /** + * Set Tags on a Bucket + * + */ + setBucketTagging(bucketName: string, tags: TagList, callback: NoResultCallback): void + setBucketTagging(bucketName: string, tags: TagList): Promise + + setBucketTagging(bucketName: string, tags: TagList, cb?: NoResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isObject(tags)) { + throw new errors.InvalidArgumentError('tags should be of type "object"') + } + if (Object.keys(tags).length > 10) { + throw new errors.InvalidArgumentError('maximum tags allowed is 10"') + } + + return asCallback(cb, this.setTagging({ bucketName, tags })) + } + + getBucketLifecycle(bucketName: string, callback: ResultCallback): void + getBucketLifecycle(bucketName: string): Promise + + /** + * Get lifecycle configuration on a bucket. + */ + getBucketLifecycle(bucketName: string, cb?: ResultCallback) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + const method = 'GET' + const query = 'lifecycle' + const requestOptions: RequestOption = { method, bucketName, query } + + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync(requestOptions) + const body = await readAsBuffer(res) + return xmlParsers.parseLifecycleConfig(body.toString()) + }) + } + + removeBucketLifecycle(bucketName: string, callback: NoResultCallback): void + removeBucketLifecycle(bucketName: string): Promise + + /** + * Remove lifecycle configuration of a bucket. + */ + removeBucketLifecycle(bucketName: string, cb?: NoResultCallback) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + const method = 'DELETE' + const query = 'lifecycle' + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit({ method, bucketName, query }, '', [204]) + }) + } + + // presignedPostPolicy can be used in situations where we want more control on the upload than what + // presignedPutObject() provides. i.e Using presignedPostPolicy we will be able to put policy restrictions + + // return PostPolicy object + newPostPolicy() { + return new PostPolicy() + } + + /** + * Put lifecycle configuration on a bucket. 
+ * Apply lifecycle configuration on a bucket. + * + * this method is not documented yet so it's marked as `protected`, ts will not emit it in type definition + * + * @param bucketName + * @param policyConfig - a valid policy configuration object. + */ + protected async applyBucketLifecycle(bucketName: string, policyConfig: Lifecycle): Promise { + const method = 'PUT' + const query = 'lifecycle' + + const encoder = new TextEncoder() + const builder = new xml2js.Builder({ + rootName: 'LifecycleConfiguration', + headless: true, + renderOpts: { pretty: false }, + }) + + const payload = Buffer.from(encoder.encode(builder.buildObject(policyConfig))) + const headers: RequestHeaders = { 'Content-MD5': toMd5(payload) } + await this.makeRequestAsyncOmit({ method, bucketName, query, headers }, payload) + } + + /** Set/Override lifecycle configuration on a bucket. if the configuration is empty, it removes the configuration. + * + * @param bucketName + * @param lifecycleConfig - null or empty object will remove bucket life cycle + * @param callback - if no callback, a promise will be returned + */ + setBucketLifecycle(bucketName: string, lifecycleConfig: Lifecycle | null, callback: NoResultCallback): void + setBucketLifecycle(bucketName: string, lifecycleConfig: Lifecycle | null): Promise + + setBucketLifecycle(bucketName: string, lifeCycleConfig: Lifecycle | null = null, cb?: NoResultCallback) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + return asCallbackFn(cb, async () => { + if (isEmpty(lifeCycleConfig)) { + await this.removeBucketLifecycle(bucketName) + } else { + await this.applyBucketLifecycle(bucketName, lifeCycleConfig) + } + }) + } + + // List the objects in the bucket. + // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `prefix` _string_: the prefix of the objects that should be listed (optional, default `''`) + // * `recursive` _bool_: `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. (optional, default `false`) + // * `listOpts _object_: query params to list object with below keys + // * listOpts.MaxKeys _int_ maximum number of keys to return + // * listOpts.IncludeVersion _bool_ true|false to include versions. 
+ // __Return Value__ + // * `stream` _Stream_: stream emitting the objects in the bucket, the object is of the format: + // * `obj.name` _string_: name of the object + // * `obj.prefix` _string_: name of the object prefix + // * `obj.size` _number_: size of the object + // * `obj.etag` _string_: etag of the object + // * `obj.lastModified` _Date_: modified time stamp + // * `obj.isDeleteMarker` _boolean_: true if it is a delete marker + + listObjects( + bucketName: string, + prefix: string, + recursive: boolean, + listOpts: { + MaxKeys?: number + IncludeVersion?: boolean + } = {}, + ): BucketStream { + if (prefix === undefined) { + prefix = '' + } + if (recursive === undefined) { + recursive = false + } + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidPrefix(prefix)) { + throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) + } + if (!isString(prefix)) { + throw new TypeError('prefix should be of type "string"') + } + if (!isBoolean(recursive)) { + throw new TypeError('recursive should be of type "boolean"') + } + if (!isObject(listOpts)) { + throw new TypeError('listOpts should be of type "object"') + } + const listQueryOpts = { + Delimiter: recursive ? '' : '/', // if recursive is false set delimiter to '/' + MaxKeys: 1000, + IncludeVersion: listOpts.IncludeVersion, + } + let objects: S3ListObject[] = [] + let ended = false + const readStream = new stream.Readable({ objectMode: true }) + + let marker = '' + // eslint-disable-next-line @typescript-eslint/no-misused-promises + readStream._read = async () => { + // push one object per _read() + if (objects.length) { + readStream.push(objects.shift()) + return + } + if (ended) { + return readStream.push(null) + } + + try { + const result = await this.listObjectsQuery(bucketName, prefix, marker, listQueryOpts) + while (!ended) { + if (result.isTruncated) { + marker = result.nextMarker || (result.versionIdMarker as string) + } else { + ended = true + } + objects = result.objects + // @ts-expect-error next read + readStream._read() + } + } catch (e) { + readStream.emit('error', e) + } + } + + return readStream + } + + // list a batch of objects + protected async listObjectsQuery( + bucketName: string, + prefix: string, + marker: string, + { + Delimiter, + MaxKeys, + IncludeVersion, + }: Partial> & Required>, + ) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isString(prefix)) { + throw new TypeError('prefix should be of type "string"') + } + if (!isString(marker)) { + throw new TypeError('marker should be of type "string"') + } + + if (!isString(Delimiter)) { + throw new TypeError('Delimiter should be of type "string"') + } + if (!isNumber(MaxKeys)) { + throw new TypeError('MaxKeys should be of type "number"') + } + + const queries = [] + // escape every value in query string, except maxKeys + queries.push(`prefix=${uriEscape(prefix)}`) + queries.push(`delimiter=${uriEscape(Delimiter)}`) + queries.push(`encoding-type=url`) + + if (IncludeVersion) { + queries.push(`versions`) + } + + if (marker) { + marker = uriEscape(marker) + if (IncludeVersion) { + queries.push(`key-marker=${marker}`) + } else { + queries.push(`marker=${marker}`) + } + } + + // no need to escape maxKeys + if (MaxKeys) { + if (MaxKeys >= 1000) { + MaxKeys = 1000 + } + queries.push(`max-keys=${MaxKeys}`) + } + queries.sort() + let query = '' + if (queries.length > 0) { + query = 
`${queries.join('&')}` + } + + const method = 'GET' + + const res = await this.makeRequestAsync({ method, bucketName, query }) + const body = await readAsBuffer(res) + + return xmlParsers.parseListObjects(body.toString()) + } + + putObjectRetention(bucketName: string, objectName: string, callback: NoResultCallback): void + putObjectRetention( + bucketName: string, + objectName: string, + retentionOptions: Retention, + callback: NoResultCallback, + ): void + putObjectRetention(bucketName: string, objectName: string, retentionOptions?: Retention): Promise + + putObjectRetention( + bucketName: string, + objectName: string, + retentionOptsOrCallback?: Retention | NoResultCallback, + callback?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + let retentionOpts: Retention = {} + let cb: undefined | NoResultCallback + if (isFunction(retentionOptsOrCallback)) { + cb = retentionOptsOrCallback + } else { + retentionOpts = retentionOptsOrCallback as Retention + cb = callback + } + + if (!isObject(retentionOpts)) { + throw new errors.InvalidArgumentError('retentionOpts should be of type "object"') + } else { + if (retentionOpts.governanceBypass && !isBoolean(retentionOpts.governanceBypass)) { + throw new errors.InvalidArgumentError(`Invalid value for governanceBypass: ${retentionOpts.governanceBypass}`) + } + if ( + retentionOpts.mode && + ![RETENTION_MODES.COMPLIANCE, RETENTION_MODES.GOVERNANCE].includes(retentionOpts.mode) + ) { + throw new errors.InvalidArgumentError(`Invalid object retention mode: ${retentionOpts.mode}`) + } + if (retentionOpts.retainUntilDate && !isString(retentionOpts.retainUntilDate)) { + throw new errors.InvalidArgumentError(`Invalid value for retainUntilDate: ${retentionOpts.retainUntilDate}`) + } + if (retentionOpts.versionId && !isString(retentionOpts.versionId)) { + throw new errors.InvalidArgumentError(`Invalid value for versionId: ${retentionOpts.versionId}`) + } + } + + const method = 'PUT' + let query = 'retention' + + const headers: RequestHeaders = {} + if (retentionOpts.governanceBypass) { + headers['X-Amz-Bypass-Governance-Retention'] = true + } + + const builder = new xml2js.Builder({ rootName: 'Retention', renderOpts: { pretty: false }, headless: true }) + const params: Record = {} + + if (retentionOpts.mode) { + params.Mode = retentionOpts.mode + } + if (retentionOpts.retainUntilDate) { + params.RetainUntilDate = retentionOpts.retainUntilDate + } + if (retentionOpts.versionId) { + query += `&versionId=${retentionOpts.versionId}` + } + + const payload = builder.buildObject(params) + + headers['Content-MD5'] = toMd5(payload) + + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit( + { + method, + bucketName, + objectName, + query, + headers, + }, + payload, + [200, 204], + ) + }) + } + + getBucketEncryption(bucketName: string, callback: ResultCallback): void + getBucketEncryption(bucketName: string): Promise + getBucketEncryption(bucketName: string, cb?: ResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isOptionalFunction(cb)) { + throw new errors.InvalidArgumentError('callback should be of type "function"') + } + const method = 'GET' + const query = 'encryption' + + return asCallbackFn(cb, 
async () => { + const res = await this.makeRequestAsync({ method, bucketName, query }) + const body = await readAsBuffer(res) + return xmlParsers.parseBucketEncryptionConfig(body.toString()) + }) + } + + setBucketEncryption(bucketName: string, encryptionConfig: EncryptionConfig, callback: NoResultCallback): void + setBucketEncryption(bucketName: string, encryptionConfig: EncryptionConfig): Promise + setBucketEncryption( + bucketName: string, + encryptionConfigOrCallback: EncryptionConfig | NoResultCallback | undefined, + callback?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + + let encryptionConfig: EncryptionConfig | undefined + let cb: NoResultCallback | undefined + + if (isFunction(encryptionConfigOrCallback)) { + cb = encryptionConfigOrCallback + encryptionConfig = undefined + } else { + encryptionConfig = encryptionConfigOrCallback + cb = callback + } + + if (encryptionConfig && encryptionConfig.Rule && encryptionConfig.Rule.length > 1) { + throw new errors.InvalidArgumentError('Invalid Rule length. Only one rule is allowed: ' + encryptionConfig.Rule) + } + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + let encryptionObj = encryptionConfig + if (isEmpty(encryptionConfig)) { + encryptionObj = { + // Default MinIO Server Supported Rule + Rule: [ + { + ApplyServerSideEncryptionByDefault: { + SSEAlgorithm: 'AES256', + }, + }, + ], + } + } + + const method = 'PUT' + const query = 'encryption' + const builder = new xml2js.Builder({ + rootName: 'ServerSideEncryptionConfiguration', + renderOpts: { pretty: false }, + headless: true, + }) + const payload = builder.buildObject(encryptionObj) + + const headers: RequestHeaders = {} + headers['Content-MD5'] = toMd5(payload) + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit( + { + method, + bucketName, + query, + headers, + }, + payload, + ) + }) + } + + /** + * Remove the specified object. 
+ */ + removeObject(bucketName: string, objectName: string, removeOpts: RemoveOptions, callback: NoResultCallback): void + removeObject(bucketName: string, objectName: string, callback: NoResultCallback): void + removeObject(bucketName: string, objectName: string, removeOpts?: RemoveOptions): Promise + removeObject( + bucketName: string, + objectName: string, + removeOptsOrCallback: RemoveOptions | NoResultCallback = {}, + callback?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + let removeOpts: RemoveOptions = {} + let cb: NoResultCallback | undefined + + // backward compatibility + if (isFunction(removeOptsOrCallback)) { + cb = removeOptsOrCallback + } else { + removeOpts = removeOptsOrCallback + cb = callback + } + + if (!isObject(removeOpts)) { + throw new errors.InvalidArgumentError('removeOpts should be of type "object"') + } + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + const method = 'DELETE' + const queryParams: Record = {} + + if (removeOpts.versionId) { + queryParams.versionId = `${removeOpts.versionId}` + } + const headers: RequestHeaders = {} + if (removeOpts.governanceBypass) { + headers['X-Amz-Bypass-Governance-Retention'] = true + } + if (removeOpts.forceDelete) { + headers['x-minio-force-delete'] = true + } + + const query = queryString.stringify(queryParams) + + const requestOptions: RequestOption = { method, bucketName, objectName, headers } + if (query) { + requestOptions['query'] = query + } + + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit(requestOptions, '', [200, 204]) + }) + } + + /** + * Generate a generic pre-signed URL which can be used for HTTP methods GET, PUT, HEAD and DELETE + * + * @param httpMethod - name of the HTTP method + * @param bucketName - name of the bucket + * @param objectName - name of the object + * @param expires - expiry in seconds (optional, default 7 days) + * @param reqParams - request parameters (optional) e.g {versionId:"10fa9946-3f64-4137-a58f-888065c0732e"} + * @param requestDate - A date object, the url will be issued at (optional) + */ + presignedUrl( + httpMethod: string, + bucketName: string, + objectName: string, + expires?: number, + reqParams?: Record, + requestDate?: Date, + ): Promise + + presignedUrl(httpMethod: string, bucketName: string, objectName: string, callback: ResultCallback): void + presignedUrl( + httpMethod: string, + bucketName: string, + objectName: string, + expiry: number, + callback: ResultCallback, + ): void + presignedUrl( + httpMethod: string, + bucketName: string, + objectName: string, + expiry: number, + reqParams: Record, + callback: ResultCallback, + ): void + presignedUrl( + httpMethod: string, + bucketName: string, + objectName: string, + expiry: number, + reqParams: Record, + requestDate: Date, + callback: ResultCallback, + ): void + + presignedUrl( + method: 'GET' | 'DELETE' | 'PUT' | 'POST', + bucketName: string, + objectName: string, + // expires?: number, + // reqParams?: Record, + // requestDate?: Date, + // callback?: ResultCallback, + ...originalArgs: unknown[] + ): void | Promise { + if (this.anonymous) { + throw new errors.AnonymousRequestError('Presigned ' + method + ' url cannot be generated for anonymous requests') + } + + let [[expires, reqParams, requestDate], cb] 
= findCallback< + [number, Record, Date], + ResultCallback + >(originalArgs) + + expires = expires ?? 24 * 60 * 60 * 7 // 7 days in seconds + reqParams = reqParams ?? {} + requestDate = requestDate ?? new Date() + + if (!isNumber(expires)) { + throw new TypeError(`expires should be of type "number", got ${expires}`) + } + if (!isObject(reqParams)) { + throw new TypeError(`reqParams should be of type "object", got ${reqParams}`) + } + if (!isValidDate(requestDate)) { + throw new TypeError(`requestDate should be of type "Date" and valid, got ${requestDate}`) + } + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + const query = queryString.stringify(reqParams) + return asCallbackFn(cb, async () => { + const region = await this.getBucketRegionAsync(bucketName) + + const reqOptions = this.getRequestOptions({ method, region, bucketName, objectName, query }) + void this.checkAndRefreshCreds() + return presignSignatureV4( + reqOptions, + this.accessKey, + this.secretKey, + this.sessionToken!, + region, + requestDate, + expires, + ) + }) + } + + /** + * Generate a presigned URL for GET + * + * @param bucketName - name of the bucket + * @param objectName - name of the object + * @param expires - expiry in seconds (optional, default 7 days) + * @param respHeaders - response headers to override or request params for query (optional) e.g {versionId:"10fa9946-3f64-4137-a58f-888065c0732e"} + * @param requestDate - A date object, the url will be issued at (optional) + */ + presignedGetObject( + bucketName: string, + objectName: string, + expires?: number, + respHeaders?: Record, + requestDate?: Date, + ): Promise + + presignedGetObject(bucketName: string, objectName: string, callback: ResultCallback): void + presignedGetObject(bucketName: string, objectName: string, expires: number, callback: ResultCallback): void + presignedGetObject( + bucketName: string, + objectName: string, + expires: number, + respHeaders: Record, + callback: ResultCallback, + ): void + presignedGetObject( + bucketName: string, + objectName: string, + expires: number, + respHeaders: Record, + requestDate: Date, + callback: ResultCallback, + ): void + + presignedGetObject( + bucketName: string, + objectName: string, + expires?: unknown, + respHeaders?: unknown, + requestDate?: unknown, + cb?: unknown, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + if (isFunction(respHeaders)) { + cb = respHeaders + respHeaders = {} + requestDate = new Date() + } + + const validRespHeaders = [ + 'response-content-type', + 'response-content-language', + 'response-expires', + 'response-cache-control', + 'response-content-disposition', + 'response-content-encoding', + ] + validRespHeaders.forEach((header) => { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + if (respHeaders !== undefined && respHeaders[header] !== undefined && !isString(respHeaders[header])) { + throw new TypeError(`response header ${header} should be of type "string"`) + } + }) + + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore presignedUrl will check type values, just leave it here for future refactor. 
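The presign methods above only sign; nothing is sent to the server until the URL is actually used. A sketch of the two common cases, promise style, with sample names and a 24-hour expiry:

```ts
import { TypedClient } from './typed-client.ts'

declare const client: TypedClient // assume a configured client

// Download URL with an overridden Content-Disposition (the valid response-* override
// headers are whitelisted above).
const getUrl = await client.presignedGetObject('my-bucket', 'photo.jpg', 24 * 60 * 60, {
  'response-content-disposition': 'attachment; filename="photo.jpg"',
})

// Upload URL: whoever holds it may PUT the object body until it expires.
const putUrl = await client.presignedPutObject('my-bucket', 'upload.bin', 24 * 60 * 60)
```

For browser form uploads, the POST-policy flow shown next (`newPostPolicy`, then `presignedPostPolicy`) returns a `postURL` plus `formData` fields instead of a single URL.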
+ return this.presignedUrl('GET', bucketName, objectName, expires as number, respHeaders, requestDate as Date, cb) + } + + presignedPutObject(bucketName: string, objectName: string, callback: ResultCallback): void + presignedPutObject(bucketName: string, objectName: string, expiry: number, callback: ResultCallback): void + presignedPutObject(bucketName: string, objectName: string, expiry?: number): Promise + + // * `expiry` _number_: expiry in seconds (optional, default 7 days) + presignedPutObject( + bucketName: string, + objectName: string, + expires?: number | ResultCallback, + cb?: ResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + return this.presignedUrl('PUT', bucketName, objectName, expires as number, cb) + } + + presignedPostPolicy(policy: PostPolicy, callback: ResultCallback): void + presignedPostPolicy(policy: PostPolicy): Promise + presignedPostPolicy(postPolicy: PostPolicy, cb?: ResultCallback): void | Promise { + return asCallbackFn(cb, async () => { + if (this.anonymous) { + throw new errors.AnonymousRequestError('Presigned POST policy cannot be generated for anonymous requests') + } + if (!isObject(postPolicy)) { + throw new TypeError('postPolicy should be of type "object"') + } + if (!isOptionalFunction(cb)) { + throw new TypeError('cb should be of type "function"') + } + // @ts-expect-error index check + const region = await this.getBucketRegionAsync(postPolicy.formData.bucket) + const date = new Date() + const dateStr = makeDateLong(date) + void this.checkAndRefreshCreds() + + if (!postPolicy.policy.expiration) { + // 'expiration' is mandatory field for S3. + // Set default expiration date of 7 days. + const expires = new Date() + expires.setSeconds(24 * 60 * 60 * 7) + postPolicy.setExpires(expires) + } + + postPolicy.policy.conditions.push(['eq', '$x-amz-date', dateStr]) + postPolicy.formData['x-amz-date'] = dateStr + + postPolicy.policy.conditions.push(['eq', '$x-amz-algorithm', 'AWS4-HMAC-SHA256']) + postPolicy.formData['x-amz-algorithm'] = 'AWS4-HMAC-SHA256' + + postPolicy.policy.conditions.push(['eq', '$x-amz-credential', this.accessKey + '/' + getScope(region, date)]) + postPolicy.formData['x-amz-credential'] = this.accessKey + '/' + getScope(region, date) + + if (this.sessionToken) { + postPolicy.policy.conditions.push(['eq', '$x-amz-security-token', this.sessionToken]) + postPolicy.formData['x-amz-security-token'] = this.sessionToken + } + + const policyBase64 = Buffer.from(JSON.stringify(postPolicy.policy)).toString('base64') + + postPolicy.formData.policy = policyBase64 + + postPolicy.formData['x-amz-signature'] = postPresignSignatureV4(region, date, this.secretKey, policyBase64) + const opts: RequestOption = { method: 'POST', region: region, bucketName: postPolicy.formData.bucket } + const reqOptions = this.getRequestOptions(opts) + const portStr = this.port == 80 || this.port === 443 ? 
'' : `:${this.port.toString()}`
+      const urlStr = `${reqOptions.protocol}//${reqOptions.host}${portStr}${reqOptions.path}`
+      return { postURL: urlStr, formData: postPolicy.formData }
+    })
+  }
+
+  setObjectTagging(bucketName: string, objectName: string, tags: TagList, callback: NoResultCallback): void
+  setObjectTagging(
+    bucketName: string,
+    objectName: string,
+    tags: TagList,
+    putOptions: VersionIdentification,
+    callback: NoResultCallback,
+  ): void
+  setObjectTagging(
+    bucketName: string,
+    objectName: string,
+    tags: TagList,
+    putOptions?: VersionIdentification,
+  ): Promise<void>
+
+  /** Set Tags on an Object
+   * __Arguments__
+   * bucketName _string_
+   * objectName _string_
+   * tags _object_ of the form {'<tag-key-1>':'<tag-value-1>','<tag-key-2>':'<tag-value-2>'}
+   * putOpts _object_ (Optional) e.g. {versionId:"my-object-version-id"}
+   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
+   */
+  setObjectTagging(
+    bucketName: string,
+    objectName: string,
+    tagsArg: TagList,
+    putOptsArg?: VersionIdentification | NoResultCallback,
+    cbArg?: NoResultCallback,
+  ): void | Promise<void> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError('Invalid object name: ' + objectName)
+    }
+
+    let [[tags, putOpts], cb] = findCallback<[TagList, VersionIdentification?], NoResultCallback>([
+      tagsArg,
+      putOptsArg,
+      cbArg,
+    ])
+    putOpts = putOpts ?? {}
+
+    if (!isObject(tags)) {
+      throw new errors.InvalidArgumentError('tags should be of type "object"')
+    }
+    if (Object.keys(tags).length > 10) {
+      throw new errors.InvalidArgumentError('Maximum tags allowed is 10')
+    }
+
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+
+    return asCallback(cb, this.setTagging({ bucketName, objectName, tags, putOpts }))
+  }
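+  // Usage sketch (illustrative only, not part of this change): the client variable
+  // name `s3Client` and the bucket, object, and tag names below are hypothetical.
+  //
+  //   await s3Client.setObjectTagging('my-bucket', 'report.csv', { project: 'alpha', team: 'data' })
+  //   // or pin the tags to one version of the object:
+  //   await s3Client.setObjectTagging('my-bucket', 'report.csv', { project: 'alpha' }, { versionId: 'my-version-id' })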
+
+  /** To set Tags on a bucket or object based on the params
+   * __Arguments__
+   * taggingParams _object_ which contains the following properties:
+   *   bucketName _string_,
+   *   objectName _string_ (Optional),
+   *   tags _object_ of the form {'<tag-key-1>':'<tag-value-1>','<tag-key-2>':'<tag-value-2>'},
+   *   putOpts _object_ (Optional) e.g. {versionId:"my-object-version-id"}
+   */
+  async setTagging({
+    bucketName,
+    objectName,
+    putOpts = {},
+    tags,
+  }: {
+    tags: TagList
+    putOpts?: VersionIdentification
+    bucketName: string
+    objectName?: string
+  }): Promise<void> {
+    const method = 'PUT'
+    let query = 'tagging'
+
+    if (putOpts && putOpts.versionId) {
+      query = `${query}&versionId=${putOpts.versionId}`
+    }
+    const tagsList = []
+    for (const [key, value] of Object.entries(tags)) {
+      tagsList.push({ Key: key, Value: value })
+    }
+    const taggingConfig = {
+      Tagging: {
+        TagSet: {
+          Tag: tagsList,
+        },
+      },
+    }
+    const encoder = new TextEncoder()
+    const headers: RequestHeaders = {}
+    const builder = new xml2js.Builder({ headless: true, renderOpts: { pretty: false } })
+    const payload = Buffer.from(encoder.encode(builder.buildObject(taggingConfig)))
+    headers['Content-MD5'] = toMd5(payload)
+    const requestOptions: RequestOption = { method, bucketName, query, headers }
+
+    if (objectName) {
+      requestOptions['objectName'] = objectName
+    }
+
+    await this.makeRequestAsyncOmit(requestOptions, payload)
+  }
+
+  removeObjectTagging(bucketName: string, objectName: string, callback: NoResultCallback): void
+  removeObjectTagging(
+    bucketName: string,
+    objectName: string,
+    removeOptions: VersionIdentification,
+    callback: NoResultCallback,
+  ): void
+  removeObjectTagging(bucketName: string, objectName: string, removeOptions?: VersionIdentification): Promise<void>
+
+  /** Remove tags associated with an object
+   * __Arguments__
+   * bucketName _string_
+   * objectName _string_
+   * removeOpts _object_ (Optional) e.g. {versionId:"my-object-version-id"}
+   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
+   */
+  removeObjectTagging(
+    bucketName: string,
+    objectName: string,
+    removeOptsArg?: VersionIdentification | NoResultCallback,
+    cbArg?: NoResultCallback,
+  ): Promise<void> | void {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError('Invalid object name: ' + objectName)
+    }
+
+    const [[removeOpts], cb] = findCallback<[VersionIdentification?], NoResultCallback>([removeOptsArg, cbArg])
+    if (removeOpts && Object.keys(removeOpts).length && !isObject(removeOpts)) {
+      throw new errors.InvalidArgumentError('removeOpts should be of type "object"')
+    }
+
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+
+    return asCallback(cb, this.removeTagging({ bucketName, objectName, removeOpts }))
+  }
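+  // Usage sketch (illustrative; `s3Client` is a hypothetical configured client):
+  //
+  //   await s3Client.removeObjectTagging('my-bucket', 'report.csv')
+  //   // or remove the tags from one version only:
+  //   await s3Client.removeObjectTagging('my-bucket', 'report.csv', { versionId: 'my-version-id' })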
+
+  selectObjectContent(
+    bucketName: string,
+    objectName: string,
+    selectOpts: SelectOptions,
+    callback: ResultCallback,
+  ): void
+  selectObjectContent(bucketName: string, objectName: string, selectOpts: SelectOptions): Promise
+
+  selectObjectContent(
+    bucketName: string,
+    objectName: string,
+    selectOpts: SelectOptions,
+    cb?: ResultCallback,
+  ): void | Promise {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+    if (!isEmpty(selectOpts)) {
+      if (!isString(selectOpts.expression)) {
+        throw new TypeError('sqlExpression should be of type "string"')
+      }
+      if (!isEmpty(selectOpts.inputSerialization)) {
+        if (!isObject(selectOpts.inputSerialization)) {
+          throw new TypeError('inputSerialization should be of type "object"')
+        }
+      } else {
+        throw new TypeError('inputSerialization is required')
+      }
+      if (!isEmpty(selectOpts.outputSerialization)) {
+        if (!isObject(selectOpts.outputSerialization)) {
+          throw new TypeError('outputSerialization should be of type "object"')
+        }
+      } else {
+        throw new TypeError('outputSerialization is required')
+      }
+    } else {
+      throw new TypeError('valid select configuration is required')
+    }
+
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+
+    const method = 'POST'
+    let query = `select`
+    query += '&select-type=2'
+
+    const config: unknown[] = [
+      {
+        Expression: selectOpts.expression,
+      },
+      {
+        ExpressionType: selectOpts.expressionType || 'SQL',
+      },
+      {
+        InputSerialization: [selectOpts.inputSerialization],
+      },
+      {
+        OutputSerialization: [selectOpts.outputSerialization],
+      },
+    ]
+
+    // Optional
+    if (selectOpts.requestProgress) {
+      config.push({ RequestProgress: selectOpts.requestProgress })
+    }
+    // Optional
+    if (selectOpts.scanRange) {
+      config.push({ ScanRange: selectOpts.scanRange })
+    }
+
+    const builder = new xml2js.Builder({
+      rootName: 'SelectObjectContentRequest',
+      renderOpts: { pretty: false },
+      headless: true,
+    })
+    const payload = builder.buildObject(config)
+
+    return asCallbackFn(cb, async () => {
+      const res = await this.makeRequestAsync({ method, bucketName, objectName, query }, payload)
+      return parseSelectObjectContentResponse(await readAsBuffer(res))
+    })
+  }
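+  // Usage sketch (illustrative; `s3Client` and the CSV serialization options are
+  // assumptions, shaped after the validation rules above):
+  //
+  //   const result = await s3Client.selectObjectContent('my-bucket', 'data.csv', {
+  //     expression: 'SELECT * FROM S3Object s LIMIT 10',
+  //     inputSerialization: { CSV: { FileHeaderInfo: 'USE' }, CompressionType: 'NONE' },
+  //     outputSerialization: { CSV: {} },
+  //   })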
+
+  getObjectRetention(
+    bucketName: string,
+    objectName: string,
+    options: VersionIdentification,
+    callback: ResultCallback,
+  ): void
+  getObjectRetention(bucketName: string, objectName: string, options: VersionIdentification): Promise
+
+  getObjectRetention(
+    bucketName: string,
+    objectName: string,
+    getOpts: VersionIdentification,
+    cb?: ResultCallback,
+  ): Promise | void {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+    if (!isObject(getOpts)) {
+      throw new errors.InvalidArgumentError('getOpts should be of type "object"')
+    } else if (getOpts.versionId && !isString(getOpts.versionId)) {
+      throw new errors.InvalidArgumentError('versionId should be of type "string"')
+    }
+    if (cb && !isFunction(cb)) {
+      throw new errors.InvalidArgumentError('callback should be of type "function"')
+    }
+    const method = 'GET'
+    let query = 'retention'
+    if (getOpts.versionId) {
+      query += `&versionId=${getOpts.versionId}`
+    }
+
+    return asCallbackFn(cb, async () => {
+      const res = await this.makeRequestAsync({ method, bucketName, objectName, query })
+      const body = await readAsBuffer(res)
+      return xmlParsers.parseObjectRetentionConfig(body.toString())
+    })
+  }
+
+  getObjectTagging(bucketName: string, objectName: string, callback: ResultCallback): void
+  getObjectTagging(
+    bucketName: string,
+    objectName: string,
+    getOptions: VersionIdentification,
+    callback: ResultCallback,
+  ): void
+  getObjectTagging(bucketName: string, objectName: string, getOptions?: VersionIdentification): Promise
+
+  getObjectTagging(
+    bucketName: string,
+    objectName: string,
+    getOptsArg?: VersionIdentification | ResultCallback,
+    cbArg?: ResultCallback,
+  ): void | Promise {
+    const method = 'GET'
+    let query = 'tagging'
+
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError('Invalid object name: ' + objectName)
+    }
+
+    const [[getOpts = {}], cb] = findCallback<[VersionIdentification | undefined], ResultCallback>([
+      getOptsArg,
+      cbArg,
+    ])
+
+    if (!isObject(getOpts)) {
+      throw new errors.InvalidArgumentError('getOpts should be of type "object"')
+    }
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+
+    if (getOpts && getOpts.versionId) {
+      query = `${query}&versionId=${getOpts.versionId}`
+    }
+    const requestOptions: RequestOption = { method, bucketName, query }
+    if (objectName) {
+      requestOptions['objectName'] = objectName
+    }
+
+    return asCallbackFn(cb, async () => {
+      const res = await this.makeRequestAsync(requestOptions)
+      const body = await readAsBuffer(res)
+      return xmlParsers.parseTagging(body.toString())
+    })
+  }
+
+  getObjectLegalHold(bucketName: string, objectName: string, callback: ResultCallback): void
+  getObjectLegalHold(
+    bucketName: string,
+    objectName: string,
+    getOptions: VersionIdentification,
+    callback: ResultCallback,
+  ): void
+  getObjectLegalHold(
+    bucketName: string,
+    objectName: string,
+    getOptions?: VersionIdentification,
+  ): Promise
+
+  getObjectLegalHold(
+    bucketName: string,
+    objectName: string,
+    getOptsArg?: VersionIdentification | ResultCallback,
+    cbArg?: ResultCallback,
+  ): void | Promise {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+
+    const [[getOpts = {}], cb] = findCallback<[VersionIdentification], ResultCallback>([
+      getOptsArg,
+      cbArg,
+    ])
+
+    if (!isObject(getOpts)) {
+      throw new TypeError('getOpts should be of type "Object"')
+    } else if (Object.keys(getOpts).length > 0 && getOpts.versionId && !isString(getOpts.versionId)) {
+      throw new TypeError(`versionId should be of type "string", got ${getOpts.versionId}`)
+    }
+
+    if (!isOptionalFunction(cb)) {
+      throw new errors.InvalidArgumentError('callback should be of type "function"')
+    }
+
+    const method = 'GET'
+    let query = 'legal-hold'
+
+    if (getOpts.versionId) {
+      query += `&versionId=${getOpts.versionId}`
+    }
+
+    return asCallbackFn(cb, async () => {
+      const res = await this.makeRequestAsync({ method, bucketName, objectName, query })
+      const body = await readAsBuffer(res)
+      return xmlParsers.parseObjectLegalHoldConfig(body.toString())
+    })
+  }
+
+  setObjectLegalHold(bucketName: string, objectName: string, callback: NoResultCallback): void
+  setObjectLegalHold(
+    bucketName: string,
+    objectName: string,
+    setOptions: LegalHoldOptions,
+    callback: NoResultCallback,
+  ): void
+  setObjectLegalHold(bucketName: string, objectName: string, setOptions?: LegalHoldOptions): Promise<void>
+
+  setObjectLegalHold(
+    bucketName: string,
+    objectName: string,
+    setOptions?: LegalHoldOptions | NoResultCallback,
+    callback?: NoResultCallback,
+  ): void | Promise<void> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+
+    const defaultOpts: LegalHoldOptions = {
+      status: LEGAL_HOLD_STATUS.ENABLED,
+    }
+
+    let [[setOpts = defaultOpts], cb] = findCallback<[LegalHoldOptions], NoResultCallback>([setOptions,
callback]) + + if (!isObject(setOpts)) { + throw new TypeError('setOpts should be of type "Object"') + } else { + if (![LEGAL_HOLD_STATUS.ENABLED, LEGAL_HOLD_STATUS.DISABLED].includes(setOpts.status)) { + throw new TypeError('Invalid status: ' + setOpts.status) + } + if (setOpts.versionId && !setOpts.versionId.length) { + throw new TypeError('versionId should be of type string.:' + setOpts.versionId) + } + } + + if (!isOptionalFunction(cb)) { + throw new errors.InvalidArgumentError('callback should be of type "function"') + } + + if (isEmpty(setOpts)) { + setOpts = defaultOpts + } + + const method = 'PUT' + let query = 'legal-hold' + + if (setOpts.versionId) { + query += `&versionId=${setOpts.versionId}` + } + + const config = { + Status: setOpts.status, + } + + const builder = new xml2js.Builder({ rootName: 'LegalHold', renderOpts: { pretty: false }, headless: true }) + const payload = builder.buildObject(config) + const headers = { + 'Content-MD5': toMd5(payload), + } + + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit( + { + method, + bucketName, + objectName, + query, + headers, + }, + payload, + ) + }) + } + + /** + * Internal Method to abort a multipart upload request in case of any errors. + * @param bucketName __string__ Bucket Name + * @param objectName __string__ Object Name + * @param uploadId __string__ id of a multipart upload to cancel during compose object sequence. + */ + protected async abortMultipartUpload(bucketName: string, objectName: string, uploadId: string) { + // TODO: type callback + const method = 'DELETE' + const query = `uploadId=${uploadId}` + + const requestOptions: RequestOption = { method, bucketName, objectName: objectName, query } + await this.makeRequestAsyncOmit(requestOptions, '', [204]) + } + + removeObjects( + bucketName: string, + objectsList: Array< + | string + | { + name: string + versionId?: string + } + >, + callback: NoResultCallback, + ): void + removeObjects( + bucketName: string, + objectsList: Array< + | string + | { + name: string + versionId?: string + } + >, + ): Promise + + removeObjects( + bucketName: string, + objectsList: Array< + | string + | { + name: string + versionId?: string + } + >, + cb?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!Array.isArray(objectsList)) { + throw new errors.InvalidArgumentError('objectsList should be a list') + } + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + const maxEntries = 1000 + const query = 'delete' + const method = 'POST' + + type O = + | string + | { + name: string + versionId?: string + } + + const result = objectsList.reduce( + (result, entry) => { + result.list.push(entry) + if (result.list.length === maxEntries) { + result.listOfList.push(result.list) + result.list = [] + } + return result + }, + { listOfList: [] as O[][], list: [] as O[] }, + ) + + if (result.list.length > 0) { + result.listOfList.push(result.list) + } + + return asCallbackFn(cb, async () => { + for (const list of result.listOfList) { + const objects: { Key: string; VersionId?: string }[] = [] + list.forEach(function (value) { + if (typeof value === 'string') { + objects.push({ Key: value }) + } else { + objects.push({ Key: value.name, VersionId: value.versionId }) + } + }) + const deleteObjects = { Delete: { Quiet: true, Object: objects } } + const builder = new xml2js.Builder({ headless: true }) + const payload = 
Buffer.from(new TextEncoder().encode(builder.buildObject(deleteObjects))) + const headers = { + ['Content-MD5']: toMd5(payload), + } + + await this.makeRequestAsyncOmit( + { + method, + bucketName, + query, + headers, + }, + payload, + ) + } + }) + } +} + +export class Helper { + constructor(private readonly client: TypedBase) {} + + async MultipleFileUpload( + bucketName: string, + objectName: string, + filePath: string, + metaData: ObjectMetaData = {}, + ): Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + if (!isString(filePath)) { + throw new TypeError('filePath should be of type "string"') + } + + if (!isObject(metaData)) { + throw new TypeError('metaData should be of type "object"') + } + + // Inserts correct `content-type` attribute based on metaData and filePath + metaData = insertContentType(metaData, filePath) + + // Updates metaData to have the correct prefix if needed + const headers = prependXAMZMeta(metaData) + type Part = { + part: number + etag: string + } + + const executor = async (fd: fsp.FileHandle) => { + const stats = await fd.stat() + const fileSize = stats.size + if (fileSize > this.client.maxObjectSize) { + throw new Error(`${filePath} size : ${stats.size}, max allowed size: 5TB`) + } + + if (fileSize <= this.client.partSize) { + // simple PUT request, no multipart + const uploader = this.client.getUploader(bucketName, objectName, headers, false) + const buf = await fd.readFile() + const { md5sum, sha256sum } = transformers.hashBinary(buf, this.client.enableSHA256) + return await uploader(buf, fileSize, sha256sum, md5sum) + } + + const previousUploadId = await this.client.findUploadId(bucketName, objectName) + let eTags: Part[] = [] + // if there was a previous incomplete upload, fetch all its uploaded parts info + let uploadId: string + if (previousUploadId) { + eTags = await this.client.listParts(bucketName, objectName, previousUploadId) + uploadId = previousUploadId + } else { + // there was no previous upload, initiate a new one + uploadId = await this.client.initiateNewMultipartUpload(bucketName, objectName, headers) + } + + { + const partSize = this.client.calculatePartSize(fileSize) + const uploader = this.client.getUploader(bucketName, objectName, headers, true) + // convert array to object to make things easy + const parts = eTags.reduce(function (acc, item) { + if (!acc[item.part]) { + acc[item.part] = item + } + return acc + }, {} as Record) + const partsDone: { part: number; etag: string }[] = [] + let partNumber = 1 + let uploadedSize = 0 + + // will be reused for hashing and uploading + // don't worry it's "unsafe", we will read data from fs to fill it + const buf = Buffer.allocUnsafe(this.client.partSize) + while (uploadedSize < fileSize) { + const part = parts[partNumber] + let length = partSize + if (length > fileSize - uploadedSize) { + length = fileSize - uploadedSize + } + + await fd.read(buf, 0, length, 0) + const { md5sum, sha256sum } = transformers.hashBinary(buf.subarray(0, length), this.client.enableSHA256) + + const md5sumHex = Buffer.from(md5sum, 'base64').toString('hex') + + if (part && md5sumHex === part.etag) { + // md5 matches, chunk already uploaded + partsDone.push({ part: partNumber, etag: part.etag }) + partNumber++ + uploadedSize += length + continue + } + + const objInfo = await uploader(uploadId, partNumber, 
buf.subarray(0, length), length, sha256sum, md5sum) + partsDone.push({ part: partNumber, etag: objInfo.etag }) + partNumber++ + uploadedSize += length + } + eTags = partsDone + } + + // at last, finish uploading + return this.client.completeMultipartUpload(bucketName, objectName, uploadId, eTags) + } + + const ensureFileClose = async (executor: (fd: fsp.FileHandle) => Promise) => { + let fd + try { + fd = await fsp.open(filePath, 'r') + } catch (e) { + throw new Error(`failed to open file ${filePath}: err ${e}`, { cause: e }) + } + + try { + // make sure to keep await, otherwise file will be closed early. + return await executor(fd) + } finally { + await fd.close() + } + } + + return ensureFileClose(executor) + } +} diff --git a/src/typed-client2.ts b/src/typed-client2.ts new file mode 100644 index 00000000..4971f64c --- /dev/null +++ b/src/typed-client2.ts @@ -0,0 +1,940 @@ +import { promises as fsp } from 'node:fs' +import * as stream from 'node:stream' + +import async from 'async' +import _ from 'lodash' +import xml2js from 'xml2js' + +import * as errors from './errors.ts' +import { CopyDestinationOptions, CopySourceOptions } from './helpers.ts' +import { asCallback, asCallbackFn } from './internal/as-callback.ts' +import { fstat } from './internal/async.ts' +import { CopyConditions } from './internal/copy-conditions.ts' +import { + calculateEvenSplits, + extractMetadata, + getSourceVersionId, + getVersionId, + isBoolean, + isEmpty, + isFunction, + isNumber, + isObject, + isOptionalFunction, + isReadableStream, + isString, + isValidBucketName, + isValidObjectName, + isValidPrefix, + PART_CONSTRAINTS, + partsRequired, + pipesetup, + prependXAMZMeta, + readableStream, + sanitizeETag, + toMd5, + uriEscape, + uriResourceEscape, +} from './internal/helper.ts' +import { readAsBuffer } from './internal/response.ts' +import type { + BucketItemCopy, + NoResultCallback, + ObjectMetaData, + RequestHeaders, + ResponseHeader, + ResultCallback, + SourceObjectStats, + UploadedObjectInfo, +} from './internal/type.ts' +import { RETENTION_MODES, RETENTION_VALIDITY_UNITS } from './internal/type.ts' +import type { NotificationEvent } from './notification.ts' +import { NotificationConfig, NotificationPoller } from './notification.ts' +import * as transformers from './transformers.ts' +import { TypedClient } from './typed-client.ts' +import type { RequestOption } from './typedBase.ts' +import { findCallback, uploadStream } from './typedBase.ts' +import type { ObjectLockConfig, S3ListObject } from './xml-parsers.ts' +import * as xmlParsers from './xml-parsers.ts' + +type PartConfig = { + bucketName: string + objectName: string + uploadID: string + partNumber: number + headers: RequestHeaders +} + +export class Client extends TypedClient { + // * `callback(err, {etag, lastModified})` _function_: non null `err` indicates error, `etag` _string_ and `listModifed` _Date_ are respectively the etag and the last modified date of the newly copied object + protected copyObjectV1( + bucketName: string, + objectName: string, + srcObject: string, + arg4: unknown, + arg5: unknown, + ): Promise | void { + const [[conditions = null], cb] = findCallback<[CopyConditions | null], ResultCallback>([ + arg4, + arg5, + ]) + + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isString(srcObject)) { + throw new TypeError('srcObject 
should be of type "string"')
+    }
+    if (srcObject === '') {
+      throw new errors.InvalidPrefixError(`Empty source prefix`)
+    }
+
+    if (conditions !== null && !(conditions instanceof CopyConditions)) {
+      throw new TypeError('conditions should be of type "CopyConditions"')
+    }
+
+    const headers: RequestHeaders = {}
+    headers['x-amz-copy-source'] = uriResourceEscape(srcObject)
+
+    if (conditions !== null) {
+      if (conditions.modified !== '') {
+        headers['x-amz-copy-source-if-modified-since'] = conditions.modified
+      }
+      if (conditions.unmodified !== '') {
+        headers['x-amz-copy-source-if-unmodified-since'] = conditions.unmodified
+      }
+      if (conditions.matchETag !== '') {
+        headers['x-amz-copy-source-if-match'] = conditions.matchETag
+      }
+      if (conditions.matchETagExcept !== '') {
+        headers['x-amz-copy-source-if-none-match'] = conditions.matchETagExcept
+      }
+    }
+
+    const method = 'PUT'
+    return asCallbackFn(cb, async () => {
+      const res = await this.makeRequestAsync({ method, bucketName, objectName, headers })
+      const body = await readAsBuffer(res)
+      return xmlParsers.parseCopyObject(body.toString())
+    })
+  }
+
+  /**
+   * Internal Method to perform copy of an object.
+   * @param sourceConfig __object__ instance of CopySourceOptions @link ./helpers/CopySourceOptions
+   * @param destConfig __object__ instance of CopyDestinationOptions @link ./helpers/CopyDestinationOptions
+   * @param cb __function__ callback, called with `err` as the first argument; `err` is null if the copy succeeds
+   * @returns Promise if no callback is passed.
+   */
+  protected copyObjectV2(
+    sourceConfig: CopySourceOptions,
+    destConfig: CopyDestinationOptions,
+    cb?: ResultCallback<BucketItemCopy>,
+  ): Promise<BucketItemCopy> | void | false {
+    if (!(sourceConfig instanceof CopySourceOptions)) {
+      throw new errors.InvalidArgumentError('sourceConfig should be of type "CopySourceOptions"')
+    }
+    if (!(destConfig instanceof CopyDestinationOptions)) {
+      throw new errors.InvalidArgumentError('destConfig should be of type "CopyDestinationOptions"')
+    }
+    if (!destConfig.validate()) {
+      return false
+    }
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+
+    const headers = Object.assign({}, sourceConfig.getHeaders(), destConfig.getHeaders())
+
+    const bucketName = destConfig.Bucket
+    const objectName = destConfig.Object
+
+    const method = 'PUT'
+    return asCallbackFn(cb, async () => {
+      const res = await this.makeRequestAsync({ method, bucketName, objectName, headers })
+      const body = await readAsBuffer(res)
+      const data = xmlParsers.parseCopyObject(body.toString())
+
+      const resHeaders = res.headers
+
+      return {
+        Bucket: destConfig.Bucket,
+        Key: destConfig.Object,
+        LastModified: data.lastModified,
+        lastModified: data.lastModified,
+        MetaData: extractMetadata(resHeaders as ResponseHeader),
+        VersionId: getVersionId(resHeaders as ResponseHeader),
+        SourceVersionId: getSourceVersionId(resHeaders as ResponseHeader),
+        Etag: sanitizeETag(resHeaders.etag),
+        etag: sanitizeETag(resHeaders.etag),
+        Size: parseInt(resHeaders['content-length']!),
+      } as BucketItemCopy
+    })
+  }
+
+  copyObject(
+    bucketName: string,
+    objectName: string,
+    sourceObject: string,
+    conditions: CopyConditions,
+    callback: ResultCallback<BucketItemCopy>,
+  ): void
+  copyObject(
+    bucketName: string,
+    objectName: string,
+    sourceObject: string,
+    conditions: CopyConditions,
+  ): Promise<BucketItemCopy>
+
+  // Backward compatibility for the Copy Object API.
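+  // Usage sketch (illustrative; `s3Client`, bucket and object names are hypothetical):
+  //
+  //   // legacy form: destination bucket/object plus a "bucket/object" source path
+  //   await s3Client.copyObject('dst-bucket', 'copy.txt', '/src-bucket/original.txt', new CopyConditions())
+  //
+  //   // option-object form, routed to copyObjectV2 by the instanceof checks below
+  //   await s3Client.copyObject(
+  //     new CopySourceOptions({ Bucket: 'src-bucket', Object: 'original.txt' }),
+  //     new CopyDestinationOptions({ Bucket: 'dst-bucket', Object: 'copy.txt' }),
+  //   )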
+  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+  // @ts-ignore
+  copyObject(...allArgs): Promise<BucketItemCopy> | void | false {
+    if (allArgs[0] instanceof CopySourceOptions && allArgs[1] instanceof CopyDestinationOptions) {
+      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+      // @ts-ignore
+      return this.copyObjectV2(...allArgs)
+    }
+    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+    // @ts-ignore
+    return this.copyObjectV1(...allArgs)
+  }
+
+  async uploadPartCopy(partConfig: PartConfig) {
+    const { bucketName, objectName, uploadID, partNumber, headers } = partConfig
+
+    const method = 'PUT'
+    const query = `uploadId=${uploadID}&partNumber=${partNumber}`
+    const requestOptions: RequestOption = { method, bucketName, objectName: objectName, query, headers }
+
+    const res = await this.makeRequestAsync(requestOptions)
+    const body = await readAsBuffer(res)
+    const data = xmlParsers.uploadPartParser(body.toString())
+
+    return {
+      etag: sanitizeETag(data.ETag),
+      key: objectName,
+      part: partNumber,
+    }
+  }
+
+  // composeObject(
+  //   destObjConfig: CopyDestinationOptions,
+  //   sourceObjList: CopySourceOptions[],
+  //   callback: ResultCallback<UploadedObjectInfo>,
+  // ): void
+  // composeObject(destObjConfig: CopyDestinationOptions, sourceObjList: CopySourceOptions[]): Promise<UploadedObjectInfo>
+
+  composeObject(
+    destObjConfig: CopyDestinationOptions,
+    sourceObjList: CopySourceOptions[],
+    cb?: ResultCallback<UploadedObjectInfo>,
+  ): unknown {
+    const me = this // many async flows, so store the ref
+    const sourceFilesLength = sourceObjList.length
+
+    if (!Array.isArray(sourceObjList)) {
+      throw new errors.InvalidArgumentError('sourceObjList should be an array of CopySourceOptions')
+    }
+    if (!(destObjConfig instanceof CopyDestinationOptions)) {
+      throw new errors.InvalidArgumentError('destObjConfig should be of type CopyDestinationOptions')
+    }
+
+    if (sourceFilesLength < 1 || sourceFilesLength > PART_CONSTRAINTS.MAX_PARTS_COUNT) {
+      throw new errors.InvalidArgumentError(
+        `There must be at least one and up to ${PART_CONSTRAINTS.MAX_PARTS_COUNT} source objects.`,
+      )
+    }
+
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+
+    for (let i = 0; i < sourceFilesLength; i++) {
+      // @ts-expect-error index check
+      if (!sourceObjList[i].validate()) {
+        return false
+      }
+    }
+
+    if (!destObjConfig.validate()) {
+      return false
+    }
+
+    const getStatOptions = (srcConfig: CopySourceOptions) => {
+      let statOpts = {}
+      if (!isEmpty(srcConfig.VersionID)) {
+        statOpts = {
+          versionId: srcConfig.VersionID,
+        }
+      }
+      return statOpts
+    }
+    const srcObjectSizes: number[] = []
+    let totalSize = 0
+    let totalParts = 0
+
+    const sourceObjStats = sourceObjList.map((srcItem) =>
+      me.statObject(srcItem.Bucket, srcItem.Object, getStatOptions(srcItem)),
+    )
+
+    return asCallbackFn(cb, async () => {
+      const srcObjectInfos = await Promise.all(sourceObjStats)
+      const validatedStats = srcObjectInfos.map((resItemStat, index) => {
+        const srcConfig = sourceObjList[index]
+
+        let srcCopySize = resItemStat.size
+        // Check if a segment is specified, and if so, is the
+        // segment within object bounds?
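+        // Worked example for the range-checked branch below: with Start = 0 and
+        // End = 9 on a 100-byte object, the guard passes (9 < 100 and 0 >= 0) and
+        // srcCopySize becomes 9 - 0 + 1 = 10 bytes.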
+ // @ts-expect-error index check + if (srcConfig.MatchRange) { + // Since range is specified, + // 0 <= src.srcStart <= src.srcEnd + // so only invalid case to check is: + // @ts-expect-error index check + const srcStart = srcConfig.Start + // @ts-expect-error index check + const srcEnd = srcConfig.End + if (srcEnd >= srcCopySize || srcStart < 0) { + throw new errors.InvalidArgumentError( + `CopySrcOptions ${index} has invalid segment-to-copy [${srcStart}, ${srcEnd}] (size is ${srcCopySize})`, + ) + } + srcCopySize = srcEnd - srcStart + 1 + } + + // Only the last source may be less than `absMinPartSize` + if (srcCopySize < PART_CONSTRAINTS.ABS_MIN_PART_SIZE && index < sourceFilesLength - 1) { + throw new errors.InvalidArgumentError( + `CopySrcOptions ${index} is too small (${srcCopySize}) and it is not the last part.`, + ) + } + + // Is data to copy too large? + totalSize += srcCopySize + if (totalSize > PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE) { + throw new errors.InvalidArgumentError(`Cannot compose an object of size ${totalSize} (> 5TiB)`) + } + + // record source size + srcObjectSizes[index] = srcCopySize + + // calculate parts needed for current source + totalParts += partsRequired(srcCopySize) + // Do we need more parts than we are allowed? + if (totalParts > PART_CONSTRAINTS.MAX_PARTS_COUNT) { + throw new errors.InvalidArgumentError( + `Your proposed compose object requires more than ${PART_CONSTRAINTS.MAX_PARTS_COUNT} parts`, + ) + } + + return resItemStat + }) + + if ((totalParts === 1 && totalSize <= PART_CONSTRAINTS.MAX_PART_SIZE) || totalSize === 0) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + return this.copyObject(sourceObjList[0], destObjConfig) // use copyObjectV2 + } + + // preserve etag to avoid modification of object while copying. + for (let i = 0; i < sourceFilesLength; i++) { + // @ts-expect-error index check + sourceObjList[i].MatchETag = validatedStats[i].etag + } + + const newUploadHeaders = destObjConfig.getHeaders() + + const uploadId = await me.initiateNewMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, newUploadHeaders) + + const uploadList = validatedStats + .map((resItemStat, idx) => { + // @ts-expect-error index check + return calculateEvenSplits(srcObjectSizes[idx], sourceObjList[idx]) + }) + .flatMap((splitSize, splitIndex) => { + if (splitSize === null) { + throw new Error('BUG: splitSize === 0') + } + + const { startIndex: startIdx, endIndex: endIdx, objInfo: objConfig } = splitSize + + const partIndex = splitIndex + 1 // part index starts from 1. 
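+          // Illustrative only (the exact arithmetic lives in calculateEvenSplits,
+          // not shown here): a single 12 MiB source split three ways would yield
+          // something like startIndex [0, 4Mi, 8Mi] and endIndex [4Mi-1, 8Mi-1, 12Mi-1],
+          // and each (start, end) pair below becomes one ranged upload-part-copy.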
+ const totalUploads = Array.from(startIdx) + + // @ts-expect-error index check + const headers = sourceObjList[splitIndex].getHeaders() + + return totalUploads.map((splitStart, upldCtrIdx) => { + const splitEnd = endIdx[upldCtrIdx] + + const sourceObj = `${objConfig.Bucket}/${objConfig.Object}` + headers['x-amz-copy-source'] = `${sourceObj}` + headers['x-amz-copy-source-range'] = `bytes=${splitStart}-${splitEnd}` + + return { + bucketName: destObjConfig.Bucket, + objectName: destObjConfig.Object, + uploadID: uploadId, + partNumber: partIndex, + headers: headers, + sourceObj: sourceObj, + } as PartConfig + }) + }) + + try { + const rr = await async.map(uploadList, async (o: PartConfig) => me.uploadPartCopy(o)) + const partsDone = rr.map((partCopy) => ({ etag: partCopy.etag, part: partCopy.part })) + return me.completeMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId, partsDone) + } catch (e) { + await this.abortMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId) + throw e + } + }) + } + + setObjectLockConfig( + bucketName: string, + lockConfigOpts: ObjectLockConfig = {}, + cb?: NoResultCallback, + ): void | Promise { + const retentionModes = [RETENTION_MODES.COMPLIANCE, RETENTION_MODES.GOVERNANCE] + const validUnits: RETENTION_VALIDITY_UNITS[] = [RETENTION_VALIDITY_UNITS.DAYS, RETENTION_VALIDITY_UNITS.YEARS] + + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + + if (lockConfigOpts.mode && !retentionModes.includes(lockConfigOpts.mode)) { + throw new TypeError(`lockConfigOpts.mode should be one of ${retentionModes}`) + } + if (lockConfigOpts.unit && !validUnits.includes(lockConfigOpts.unit)) { + throw new TypeError(`lockConfigOpts.unit should be one of ${validUnits}`) + } + if (lockConfigOpts.validity && !isNumber(lockConfigOpts.validity)) { + throw new TypeError(`lockConfigOpts.validity should be a number`) + } + + const method = 'PUT' + const query = 'object-lock' + + const config: { ObjectLockEnabled: string; Rule?: { DefaultRetention: Record } } = { + ObjectLockEnabled: 'Enabled', + } + const configKeys = Object.keys(lockConfigOpts) + // Check if keys are present and all keys are present. 
+ if (configKeys.length > 0) { + if (_.difference(configKeys, ['unit', 'mode', 'validity']).length !== 0) { + throw new TypeError( + `lockConfigOpts.mode,lockConfigOpts.unit,lockConfigOpts.validity all the properties should be specified.`, + ) + } else { + config.Rule = { + DefaultRetention: {}, + } + if (lockConfigOpts.mode) { + config.Rule.DefaultRetention.Mode = lockConfigOpts.mode + } + if (lockConfigOpts.unit === RETENTION_VALIDITY_UNITS.DAYS) { + config.Rule.DefaultRetention.Days = lockConfigOpts.validity + } else if (lockConfigOpts.unit === RETENTION_VALIDITY_UNITS.YEARS) { + config.Rule.DefaultRetention.Years = lockConfigOpts.validity + } + } + } + + const builder = new xml2js.Builder({ + rootName: 'ObjectLockConfiguration', + renderOpts: { pretty: false }, + headless: true, + }) + const payload = builder.buildObject(config) + + const headers: RequestHeaders = {} + headers['Content-MD5'] = toMd5(payload) + + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit( + { + method, + bucketName, + query, + headers, + }, + payload, + ) + }) + } + + getObjectLockConfig( + bucketName: string, + cb?: ResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isFunction(cb)) { + throw new errors.InvalidArgumentError('callback should be of type "function"') + } + const method = 'GET' + const query = 'object-lock' + + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, query }) + const body = await readAsBuffer(res) + return xmlParsers.parseObjectLockConfig(body.toString()) + }) + } + + removeBucketEncryption(bucketName: string, cb: NoResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isFunction(cb)) { + throw new errors.InvalidArgumentError('callback should be of type "function"') + } + const method = 'DELETE' + const query = 'encryption' + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit({ method, bucketName, query }, '', [204]) + }) + } + + setBucketReplication( + bucketName: string, + replicationConfig: { + role?: string + rules?: unknown + } = {}, + cb?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isObject(replicationConfig)) { + throw new errors.InvalidArgumentError('replicationConfig should be of type "object"') + } else { + if (isEmpty(replicationConfig.role)) { + throw new errors.InvalidArgumentError('Role cannot be empty') + } else if (replicationConfig.role && !isString(replicationConfig.role)) { + throw new errors.InvalidArgumentError('Invalid value for role', replicationConfig.role) + } + if (isEmpty(replicationConfig.rules)) { + throw new errors.InvalidArgumentError('Minimum one replication rule must be specified') + } + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + const method = 'PUT' + const query = 'replication' + const headers: RequestHeaders = {} + + const replicationParamsConfig = { + ReplicationConfiguration: { + Role: replicationConfig.role, + Rule: replicationConfig.rules, + }, + } + + const builder = new xml2js.Builder({ renderOpts: { pretty: false }, headless: true }) + + const payload = builder.buildObject(replicationParamsConfig) + + headers['Content-MD5'] = toMd5(payload) + return 
asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit( + { + method, + bucketName, + query, + headers, + }, + payload, + ) + }) + } + + getBucketReplication(bucketName: string, cb?: ResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isFunction(cb)) { + throw new errors.InvalidArgumentError('callback should be of type "function"') + } + const method = 'GET' + const query = 'replication' + + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, query }) + const body = await readAsBuffer(res) + return xmlParsers.parseReplicationConfig(body.toString()) + }) + } + + removeBucketReplication(bucketName: string, cb?: NoResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + const method = 'DELETE' + const query = 'replication' + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit( + { + method, + bucketName, + query, + }, + '', + [200, 204], + ) + }) + } + + removeAllBucketNotification(bucketName: string, cb?: NoResultCallback) { + return this.setBucketNotification(bucketName, new NotificationConfig(), cb) + } + + // in the S3 provider + getBucketNotification(bucketName: string, cb?: ResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + const method = 'GET' + const query = 'notification' + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, query }) + const body = await readAsBuffer(res) + return xmlParsers.parseBucketNotification(body.toString()) + }) + } + + // Listens for bucket notifications. Returns an EventEmitter. 
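+  // Usage sketch (illustrative; `s3Client` is a hypothetical configured client and
+  // the event name follows the S3 notification convention):
+  //
+  //   const poller = s3Client.listenBucketNotification('my-bucket', 'photos/', '.jpg', ['s3:ObjectCreated:*'])
+  //   poller.on('notification', (record) => console.log('new object:', record.s3.object.key))
+  //   // call poller.stop() when done listening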
+ listenBucketNotification(bucketName: string, prefix: string, suffix: string, events: NotificationEvent[]) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) + } + if (!isString(prefix)) { + throw new TypeError('prefix must be of type string') + } + if (!isString(suffix)) { + throw new TypeError('suffix must be of type string') + } + if (!Array.isArray(events)) { + throw new TypeError('events must be of type Array') + } + const listener = new NotificationPoller(this, bucketName, prefix, suffix, events) + listener.start() + + return listener + } + + // Remove all the notification configurations in the S3 provider + setBucketNotification(bucketName: string, config: NotificationConfig, cb?: NoResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isObject(config)) { + throw new TypeError('notification config should be of type "Object"') + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + const method = 'PUT' + const query = 'notification' + const builder = new xml2js.Builder({ + rootName: 'NotificationConfiguration', + renderOpts: { pretty: false }, + headless: true, + }) + const payload = builder.buildObject(config) + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit({ method, bucketName, query }, payload) + }) + } + + // * `obj.lastModified` _Date_: modified time stamp + listObjectsV2(bucketName: string, prefix: string, recursive?: boolean, startAfter?: string) { + if (prefix === undefined) { + prefix = '' + } + if (recursive === undefined) { + recursive = false + } + if (startAfter === undefined) { + startAfter = '' + } + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidPrefix(prefix)) { + throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) + } + if (!isString(prefix)) { + throw new TypeError('prefix should be of type "string"') + } + if (!isBoolean(recursive)) { + throw new TypeError('recursive should be of type "boolean"') + } + if (!isString(startAfter)) { + throw new TypeError('startAfter should be of type "string"') + } + // if recursive is false set delimiter to '/' + const delimiter = recursive ? '' : '/' + let continuationToken = '' + let objects: S3ListObject[] = [] + let ended = false + const readStream = new stream.Readable({ objectMode: true }) + readStream._read = () => { + // push one object per _read() + if (objects.length) { + readStream.push(objects.shift()) + return + } + if (ended) { + return readStream.push(null) + } + // if there are no objects to push do query for the next batch of objects + this.listObjectsV2Query(bucketName, prefix, continuationToken, delimiter, 1000, startAfter!) 
+ .on('error', (e) => readStream.emit('error', e)) + .on('data', (result) => { + if (result.isTruncated) { + continuationToken = result.nextContinuationToken + } else { + ended = true + } + objects = result.objects + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + readStream._read() + }) + } + return readStream + } + + // List the objects in the bucket using S3 ListObjects V2 + // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `prefix` _string_: the prefix of the objects that should be listed (optional, default `''`) + // * `recursive` _bool_: `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. (optional, default `false`) + // * `startAfter` _string_: Specifies the key to start after when listing objects in a bucket. (optional, default `''`) + // + // __Return Value__ + // * `stream` _Stream_: stream emitting the objects in the bucket, the object is of the format: + // * `obj.name` _string_: name of the object + // * `obj.prefix` _string_: name of the object prefix + // * `obj.size` _number_: size of the object + // * `obj.etag` _string_: etag of the object + + // * `start-after` _string_: Specifies the key to start after when listing objects in a bucket. + listObjectsV2Query( + bucketName: string, + prefix: string, + continuationToken: string, + delimiter: string, + maxKeys: number, + startAfter: string, + ) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isString(prefix)) { + throw new TypeError('prefix should be of type "string"') + } + if (!isString(continuationToken)) { + throw new TypeError('continuationToken should be of type "string"') + } + if (!isString(delimiter)) { + throw new TypeError('delimiter should be of type "string"') + } + if (!isNumber(maxKeys)) { + throw new TypeError('maxKeys should be of type "number"') + } + if (!isString(startAfter)) { + throw new TypeError('startAfter should be of type "string"') + } + const queries = [] + + // Call for listing objects v2 API + queries.push(`list-type=2`) + queries.push(`encoding-type=url`) + + // escape every value in query string, except maxKeys + queries.push(`prefix=${uriEscape(prefix)}`) + queries.push(`delimiter=${uriEscape(delimiter)}`) + + if (continuationToken) { + continuationToken = uriEscape(continuationToken) + queries.push(`continuation-token=${continuationToken}`) + } + // Set start-after + if (startAfter) { + startAfter = uriEscape(startAfter) + queries.push(`start-after=${startAfter}`) + } + // no need to escape maxKeys + if (maxKeys) { + if (maxKeys >= 1000) { + maxKeys = 1000 + } + queries.push(`max-keys=${maxKeys}`) + } + queries.sort() + let query = '' + if (queries.length > 0) { + query = `${queries.join('&')}` + } + const method = 'GET' + const transformer = transformers.getListObjectsV2Transformer() + this.makeRequestAsync({ method, bucketName, query }, '', [200], '', true).then( + (response) => { + pipesetup(response, transformer) + }, + (e) => { + return transformer.emit('error', e) + }, + ) + return transformer + } + + // Copy the object. 
+  //
+  // __Arguments__
+  // * `bucketName` _string_: name of the bucket
+  // * `objectName` _string_: name of the object
+  // * `srcObject` _string_: path of the source object to be copied
+  // * `conditions` _CopyConditions_: copy conditions that need to be satisfied (optional, default `null`)
+
+  // * `versionId` _string_: versionId of the object
+  putObject(
+    bucketName: string,
+    objectName: string,
+    stream: string | Buffer | stream.Readable,
+    sizeArg?: number,
+    metaDataArg?: ObjectMetaData,
+    callbackArg?: ResultCallback<UploadedObjectInfo>,
+  ): void | Promise<UploadedObjectInfo> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+
+    let [[size, metaData = {}], callback] = findCallback<
+      [number | undefined, ObjectMetaData],
+      ResultCallback<UploadedObjectInfo>
+    >([sizeArg, metaDataArg, callbackArg])
+
+    // We'll need to shift arguments to the left because of metaData
+    // and size being optional.
+    if (isObject(size)) {
+      metaData = size
+      size = undefined
+    }
+
+    // Ensures metadata has the appropriate prefix for the S3 API
+    const headers = prependXAMZMeta(metaData)
+    if (typeof stream === 'string' || stream instanceof Buffer) {
+      // Adapts the non-stream interface into a stream.
+      if (size !== undefined) {
+        if (size !== Buffer.from(stream).length) {
+          throw new errors.InvalidArgumentError(
+            `size input and object length mismatch, object has length ${stream.length} but input size is ${size}`,
+          )
+        }
+      }
+      size = Buffer.from(stream).length
+    } else if (!isReadableStream(stream)) {
+      throw new TypeError('third argument should be of type "stream.Readable" or "Buffer" or "string"')
+    }
+
+    if (!isOptionalFunction(callback)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+
+    if (isNumber(size) && size < 0) {
+      throw new errors.InvalidArgumentError(`size cannot be negative, given size: ${size}`)
+    }
+
+    if (isNumber(size) && size > this.maxObjectSize) {
+      throw new TypeError(`size should not be more than ${this.maxObjectSize}`)
+    }
+
+    const executor = async () => {
+      // Get the part size and forward that to the BlockStream. Default to the
+      // largest block size possible if necessary.
+      if (size === undefined) {
+        const statSize = await getContentLength(stream)
+        if (statSize !== null) {
+          size = statSize
+        }
+      }
+
+      if (!isNumber(size)) {
+        // Backward compatibility
+        size = this.maxObjectSize
+      }
+
+      const partSize = this.calculatePartSize(size)
+
+      if (typeof stream === 'string' || Buffer.isBuffer(stream) || size <= this.partSize) {
+        const uploader = this.getUploader(bucketName, objectName, headers, false)
+        const buf = isReadableStream(stream) ? await readAsBuffer(stream) : Buffer.from(stream)
+        const { md5sum, sha256sum } = transformers.hashBinary(buf, this.enableSHA256)
+        return uploader(buf, buf.length, sha256sum, md5sum)
+      }
+
+      return uploadStream({
+        client: this,
+        stream: isReadableStream(stream) ?
stream : readableStream(stream), + partSize, + bucketName, + objectName, + headers, + }) + } + + return asCallback(callback, executor()) + } +} + +async function getContentLength(s: stream.Readable | Buffer | string): Promise { + const length = (s as unknown as Record).length as number | undefined + if (isNumber(length)) { + return length + } + + // property of fs.ReadStream + const filePath = (s as unknown as Record).path as string | undefined + if (filePath) { + const stat = await fsp.lstat(filePath) + return stat.size + } + + // property of fs.ReadStream + const fd = (s as unknown as Record).fd as number | null | undefined + + if (fd) { + const stat = await fstat(fd) + return stat.size + } + + return null +} diff --git a/src/typedBase.ts b/src/typedBase.ts new file mode 100644 index 00000000..c74dbaa1 --- /dev/null +++ b/src/typedBase.ts @@ -0,0 +1,2069 @@ +import * as crypto from 'node:crypto' +import * as fs from 'node:fs' +import type { IncomingMessage } from 'node:http' +import * as http from 'node:http' +import * as https from 'node:https' +import * as path from 'node:path' +import * as stream from 'node:stream' + +import async from 'async' +import BlockStream2 from 'block-stream2' +import { isBrowser } from 'browser-or-node' +import _ from 'lodash' +import * as querystring from 'query-string' +import queryString from 'query-string' +import xml2js from 'xml2js' + +import { CredentialProvider } from './CredentialProvider.ts' +import * as errors from './errors.ts' +import { S3Error } from './errors.ts' +import { DEFAULT_REGION } from './helpers.ts' +import { asCallback, asCallbackFn } from './internal/as-callback.ts' +import { fsp, streamPromise } from './internal/async.ts' +import { Extensions } from './internal/extensions.ts' +import type { AnyFunction } from './internal/helper.ts' +import { + extractMetadata, + getVersionId, + insertContentType, + isAmazonEndpoint, + isBoolean, + isDefined, + isEmpty, + isFunction, + isNumber, + isObject, + isOptionalFunction, + isReadableStream, + isString, + isValidBucketName, + isValidEndpoint, + isValidObjectName, + isValidPort, + isValidPrefix, + isVirtualHostStyle, + makeDateLong, + pipesetup, + prependXAMZMeta, + readableStream, + sanitizeETag, + toSha256, + uriEscape, + uriResourceEscape, +} from './internal/helper.ts' +import { drainResponse, readAsBuffer, readAsString } from './internal/response.ts' +import type { Region } from './internal/s3-endpoints.ts' +import { getS3Endpoint } from './internal/s3-endpoints.ts' +import type { + Binary, + BucketItemFromList, + BucketItemStat, + GetObjectOpt, + IRequest, + MakeBucketOpt, + NoResultCallback, + ObjectMetaData, + RequestHeaders, + ResponseHeader, + ResultCallback, + StatObjectOpts, + UploadedObjectInfo, +} from './internal/type.ts' +import { signV4 } from './signing.ts' +import * as transformers from './transformers.ts' +import type { Part } from './xml-parsers.ts' +import * as xmlParsers from './xml-parsers.ts' + +const requestOptionProperties = [ + 'agent', + 'ca', + 'cert', + 'ciphers', + 'clientCertEngine', + 'crl', + 'dhparam', + 'ecdhCurve', + 'family', + 'honorCipherOrder', + 'key', + 'passphrase', + 'pfx', + 'rejectUnauthorized', + 'secureOptions', + 'secureProtocol', + 'servername', + 'sessionIdContext', +] as const + +export interface ClientOptions { + endPoint: string + accessKey: string + secretKey: string + useSSL?: boolean + port?: number + region?: Region + transport?: typeof http | typeof https + sessionToken?: string + partSize?: number + pathStyle?: boolean + 
credentialsProvider?: CredentialProvider + s3AccelerateEndpoint?: string + transportAgent?: http.Agent +} + +// will be replaced by rollup plugin +const version = process.env.MINIO_JS_PACKAGE_VERSION || 'development' +const Package = { version } + +export type RequestMethod = 'HEAD' | 'GET' | 'POST' | 'DELETE' | 'PUT' +export type RequestOption = Partial & { + method: RequestMethod + bucketName?: string + objectName?: string + region?: string + query?: string + pathStyle?: boolean +} + +/** + * @internal + */ +export function findCallback(args: unknown[]): [A, T | undefined] { + const index = args.findIndex((v) => isFunction(v)) + if (index === -1) { + return [args as A, undefined] + } + + return [args.slice(0, index) as A, args[index] as T] +} + +export class TypedBase { + protected transport: typeof http | typeof https + protected host: string + protected port: number + protected protocol: string + protected accessKey: string + protected secretKey: string + protected sessionToken?: string + protected userAgent: string + protected anonymous: boolean + protected pathStyle: boolean + protected regionMap: Record + public region?: string + protected credentialsProvider?: CredentialProvider + partSize: number = 64 * 1024 * 1024 + protected overRidePartSize?: boolean + + protected maximumPartSize = 5 * 1024 * 1024 * 1024 + maxObjectSize = 5 * 1024 * 1024 * 1024 * 1024 + public enableSHA256: boolean + protected s3AccelerateEndpoint?: string + protected reqOptions: Record + + private readonly clientExtensions: Extensions + private logStream?: stream.Writable + private readonly transportAgent: http.Agent + + constructor(params: ClientOptions) { + // @ts-expect-error deprecated property + if (params.secure !== undefined) { + throw new Error('"secure" option deprecated, "useSSL" should be used instead') + } + // Default values if not specified. + if (params.useSSL === undefined) { + params.useSSL = true + } + if (!params.port) { + params.port = 0 + } + // Validate input params. + if (!isValidEndpoint(params.endPoint)) { + throw new errors.InvalidEndpointError(`Invalid endPoint : ${params.endPoint}`) + } + if (!isValidPort(params.port)) { + throw new errors.InvalidArgumentError(`Invalid port : ${params.port}`) + } + if (!isBoolean(params.useSSL)) { + throw new errors.InvalidArgumentError( + `Invalid useSSL flag type : ${params.useSSL}, expected to be of type "boolean"`, + ) + } + + // Validate region only if its set. + if (params.region) { + if (!isString(params.region)) { + throw new errors.InvalidArgumentError(`Invalid region : ${params.region}`) + } + } + + const host = params.endPoint.toLowerCase() + let port = params.port + let protocol: string + let transport + let transportAgent: http.Agent + // Validate if configuration is not using SSL + // for constructing relevant endpoints. + if (params.useSSL) { + // Defaults to secure. + transport = https + protocol = 'https:' + port = port || 443 + transportAgent = https.globalAgent + } else { + transport = http + protocol = 'http:' + port = port || 80 + transportAgent = http.globalAgent + } + + // if custom transport is set, use it. + if (params.transport) { + if (!isObject(params.transport)) { + throw new errors.InvalidArgumentError( + `Invalid transport type : ${params.transport}, expected to be type "object"`, + ) + } + transport = params.transport + } + + // if custom transport agent is set, use it. 
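+    // For example (illustrative), callers can supply a keep-alive agent:
+    //   new Client({ endPoint: 'play.min.io', accessKey: '...', secretKey: '...',
+    //     transportAgent: new https.Agent({ keepAlive: true, maxSockets: 32 }) })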
+    if (params.transportAgent) {
+      if (!isObject(params.transportAgent)) {
+        throw new errors.InvalidArgumentError(
+          `Invalid transportAgent type: ${params.transportAgent}, expected to be type "object"`,
+        )
+      }
+
+      transportAgent = params.transportAgent
+    }
+
+    // The User-Agent should always follow the below style.
+    // Please open an issue to discuss any new changes here.
+    //
+    //       MinIO (OS; ARCH) LIB/VER APP/VER
+    //
+    const libraryComments = `(${process.platform}; ${process.arch})`
+    const libraryAgent = `MinIO ${libraryComments} minio-js/${Package.version}`
+    // User agent block ends.
+
+    this.transport = transport
+    this.transportAgent = transportAgent
+    this.host = host
+    this.port = port
+    this.protocol = protocol
+    this.userAgent = `${libraryAgent}`
+
+    // Default path style is true
+    if (params.pathStyle === undefined) {
+      this.pathStyle = true
+    } else {
+      this.pathStyle = params.pathStyle
+    }
+
+    this.accessKey = params.accessKey ?? ''
+    this.secretKey = params.secretKey ?? ''
+    this.sessionToken = params.sessionToken
+    this.anonymous = !this.accessKey || !this.secretKey
+
+    if (params.credentialsProvider) {
+      this.credentialsProvider = params.credentialsProvider
+      void this.checkAndRefreshCreds()
+    }
+
+    this.regionMap = {}
+    if (params.region) {
+      this.region = params.region
+    }
+
+    if (params.partSize) {
+      this.partSize = params.partSize
+      this.overRidePartSize = true
+    }
+    if (this.partSize < 5 * 1024 * 1024) {
+      throw new errors.InvalidArgumentError(`Part size should be greater than 5MB`)
+    }
+    if (this.partSize > 5 * 1024 * 1024 * 1024) {
+      throw new errors.InvalidArgumentError(`Part size should be less than 5GB`)
+    }
+
+    // SHA256 is enabled only for authenticated http requests. If the request is authenticated
+    // and the connection is https we use x-amz-content-sha256=UNSIGNED-PAYLOAD
+    // header for signature calculation.
+    this.enableSHA256 = !this.anonymous && !params.useSSL
+
+    this.s3AccelerateEndpoint = params.s3AccelerateEndpoint || undefined
+    this.reqOptions = {}
+
+    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+    // @ts-ignore
+    this.clientExtensions = new Extensions(this)
+  }
+
+  /**
+   * This is S3 specific and does not hold validity in any other object storage.
+   */
+  private getAccelerateEndPointIfSet(bucketName: string, objectName?: string) {
+    if (!isEmpty(this.s3AccelerateEndpoint) && !isEmpty(bucketName) && !isEmpty(objectName)) {
+      // http://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration.html
+      // Disable transfer acceleration for non-compliant bucket names.
+      if (bucketName.includes('.')) {
+        throw new Error(`Transfer Acceleration is not supported for non compliant bucket:${bucketName}`)
+      }
+      // If transfer acceleration is requested set new host.
+      // For more details about enabling transfer acceleration read here.
+      // http://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration.html
+      return this.s3AccelerateEndpoint
+    }
+    return false
+  }
+
+  /**
+   * @param endPoint - valid S3 acceleration end point
+   */
+  public setS3TransferAccelerate(endPoint: string) {
+    this.s3AccelerateEndpoint = endPoint
+  }
+
+  /**
+   * Sets the supported request options.
+   */
+  public setRequestOptions(options: Pick<https.RequestOptions, (typeof requestOptionProperties)[number]>) {
+    // TODO: add options type details
+    if (!isObject(options)) {
+      throw new TypeError('request options should be of type "object"')
+    }
+    this.reqOptions = _.pick(options, requestOptionProperties)
+  }
+
+  /**
+   * returns options object that can be used with http.request()
+   * Takes care of constructing virtual-host-style or path-style hostname
+   */
+  protected getRequestOptions(opts: RequestOption): IRequest & { host: string; headers: Record<string, string> } {
+    const method = opts.method
+    const region = opts.region
+    const bucketName = opts.bucketName
+    let objectName = opts.objectName
+    const headers = opts.headers
+    const query = opts.query
+
+    let reqOptions = {
+      method,
+      headers: {} as RequestHeaders,
+      protocol: this.protocol,
+      // If custom transportAgent was supplied earlier, we'll inject it here
+      agent: this.transportAgent,
+    }
+
+    // Verify if virtual host supported.
+    let virtualHostStyle
+    if (bucketName) {
+      virtualHostStyle = isVirtualHostStyle(this.host, this.protocol, bucketName, this.pathStyle)
+    }
+
+    let path = '/'
+    let host = this.host
+
+    let port: undefined | number
+    if (this.port) {
+      port = this.port
+    }
+
+    if (objectName) {
+      objectName = `${uriResourceEscape(objectName)}`
+    }
+
+    // For Amazon S3 endpoint, get endpoint based on region.
+    if (isAmazonEndpoint(host)) {
+      const accelerateEndPoint = this.getAccelerateEndPointIfSet(bucketName!, objectName)
+      if (accelerateEndPoint) {
+        host = `${accelerateEndPoint}`
+      } else {
+        host = getS3Endpoint(region!)
+      }
+    }
+
+    if (virtualHostStyle && !opts.pathStyle) {
+      // For all hosts which support virtual host style, `bucketName`
+      // is part of the hostname in the following format:
+      //
+      //    var host = 'bucketName.example.com'
+      //
+      if (bucketName) {
+        host = `${bucketName}.${host}`
+      }
+      if (objectName) {
+        path = `/${objectName}`
+      }
+    } else {
+      // For all S3 compatible storage services we will fallback to
+      // path style requests, where `bucketName` is part of the URI
+      // path.
+      if (bucketName) {
+        path = `/${bucketName}`
+      }
+      if (objectName) {
+        path = `/${bucketName}/${objectName}`
+      }
+    }
+
+    if (query) {
+      path += `?${query}`
+    }
+    reqOptions.headers.host = host
+    if ((reqOptions.protocol === 'http:' && port !== 80) || (reqOptions.protocol === 'https:' && port !== 443)) {
+      reqOptions.headers.host = `${host}:${port}`
+    }
+    reqOptions.headers['user-agent'] = this.userAgent
+    if (headers) {
+      // have all header keys in lower case - to make signing easy
+      for (const [k, v] of Object.entries(headers)) {
+        reqOptions.headers[k.toLowerCase()] = v
+      }
+    }
+
+    // Use any request option specified in minioClient.setRequestOptions()
+    reqOptions = Object.assign({}, this.reqOptions, reqOptions)
+
+    return {
+      ...reqOptions,
+      headers: _.mapValues(_.pickBy(reqOptions.headers, isDefined), (v) => v.toString()),
+      host,
+      port,
+      path,
+    } satisfies https.RequestOptions
+  }
+
+  /**
+   * Set application specific information.
+   *
+   * Generates User-Agent in the following style.
+   *
+   *       MinIO (OS; ARCH) LIB/VER APP/VER
+   *
+   * @param appName - Application name.
+   * @param appVersion - Application version.
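+   *
+   * Usage sketch (application name and version are hypothetical):
+   *
+   * @example
+   * client.setAppInfo('MyApp', '1.0.0')
+   * // subsequent requests then send e.g.
+   * // User-Agent: MinIO (linux; x64) minio-js/<version> MyApp/1.0.0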
+   */
+  public setAppInfo(appName: string, appVersion: string) {
+    if (!isString(appName)) {
+      throw new TypeError(`Invalid appName: ${appName}`)
+    }
+    if (appName.trim() === '') {
+      throw new errors.InvalidArgumentError('Input appName cannot be empty.')
+    }
+    if (!isString(appVersion)) {
+      throw new TypeError(`Invalid appVersion: ${appVersion}`)
+    }
+    if (appVersion.trim() === '') {
+      throw new errors.InvalidArgumentError('Input appVersion cannot be empty.')
+    }
+    this.userAgent = `${this.userAgent} ${appName}/${appVersion}`
+  }
+
+  /**
+   * Calculate part size given the object size. Part size will be at least this.partSize
+   *
+   * @param size - total size
+   *
+   * @internal
+   */
+  public calculatePartSize(size: number) {
+    if (!isNumber(size)) {
+      throw new TypeError('size should be of type "number"')
+    }
+    if (size > this.maxObjectSize) {
+      throw new TypeError(`size should not be more than ${this.maxObjectSize}`)
+    }
+    if (this.overRidePartSize) {
+      return this.partSize
+    }
+    let partSize = this.partSize
+    for (;;) {
+      // while(true) {...} throws linting error.
+      // If partSize is big enough to accommodate the object size, then use it.
+      if (partSize * 10000 > size) {
+        return partSize
+      }
+      // Try part sizes as 64MB, 80MB, 96MB etc.
+      partSize += 16 * 1024 * 1024
+    }
+  }
+
+  /**
+   * log the request, response, error
+   */
+  private logHTTP(reqOptions: IRequest, response: http.IncomingMessage | null, err?: unknown) {
+    // if no logStream available return.
+    if (!this.logStream) {
+      return
+    }
+    if (!isObject(reqOptions)) {
+      throw new TypeError('reqOptions should be of type "object"')
+    }
+    if (response && !isReadableStream(response)) {
+      throw new TypeError('response should be of type "Stream"')
+    }
+    if (err && !(err instanceof Error)) {
+      throw new TypeError('err should be of type "Error"')
+    }
+    const logStream = this.logStream
+    const logHeaders = (headers: RequestHeaders) => {
+      Object.entries(headers).forEach(([k, v]) => {
+        if (k == 'authorization') {
+          if (isString(v)) {
+            const redactor = new RegExp('Signature=([0-9a-f]+)')
+            v = v.replace(redactor, 'Signature=**REDACTED**')
+          }
+        }
+        logStream.write(`${k}: ${v}\n`)
+      })
+      logStream.write('\n')
+    }
+    logStream.write(`REQUEST: ${reqOptions.method} ${reqOptions.path}\n`)
+    logHeaders(reqOptions.headers)
+    if (response) {
+      this.logStream.write(`RESPONSE: ${response.statusCode}\n`)
+      logHeaders(response.headers as RequestHeaders)
+    }
+    if (err) {
+      logStream.write('ERROR BODY:\n')
+      const errJSON = JSON.stringify(err, null, '\t')
+      logStream.write(`${errJSON}\n`)
+    }
+  }
+
+  /**
+   * Enable tracing
+   */
+  public traceOn(stream?: stream.Writable) {
+    if (!stream) {
+      stream = process.stdout
+    }
+    this.logStream = stream
+  }
+
+  /**
+   * Disable tracing
+   */
+  public traceOff() {
+    this.logStream = undefined
+  }
+
+  /**
+   * makeRequest is the primitive used by the apis for making S3 requests.
+   * payload can be empty string in case of no payload.
+   * statusCode is the expected statusCode. If response.statusCode does not match
+   * we parse the XML error and call the callback with the error message.
+   *
+   * A valid region is passed by the calls - listBuckets, makeBucket and getBucketRegion.
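+   *
+   * Illustrative call shape (arguments are hypothetical, not prescribed by this PR):
+   *
+   * @example
+   * // expect HTTP 200 and consume the response stream yourself:
+   * const res = await this.makeRequestAsync({ method: 'GET', bucketName: 'my-bucket', query: 'location' })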
+   *
+   * @internal
+   */
+  makeRequestAsync(
+    options: RequestOption,
+    payload: Binary = '',
+    expectedCodes: number[] = [200],
+    region = '',
+    returnResponse = true,
+  ): Promise<http.IncomingMessage> {
+    if (!isObject(options)) {
+      throw new TypeError('options should be of type "object"')
+    }
+    if (!isString(payload) && !isObject(payload)) {
+      // Buffer is of type 'object'
+      throw new TypeError('payload should be of type "string" or "Buffer"')
+    }
+    expectedCodes.forEach((statusCode) => {
+      if (!isNumber(statusCode)) {
+        throw new TypeError('statusCode should be of type "number"')
+      }
+    })
+    if (!isString(region)) {
+      throw new TypeError('region should be of type "string"')
+    }
+    if (!isBoolean(returnResponse)) {
+      throw new TypeError('returnResponse should be of type "boolean"')
+    }
+    if (!options.headers) {
+      options.headers = {}
+    }
+    if (options.method === 'POST' || options.method === 'PUT' || options.method === 'DELETE') {
+      options.headers['content-length'] = payload.length.toString()
+    }
+
+    const sha256sum = this.enableSHA256 ? toSha256(payload) : ''
+    const stream = readableStream(payload)
+    return this.makeRequestStreamAsync(options, stream, sha256sum, expectedCodes, region, returnResponse)
+  }
+
+  /**
+   * new request with promise
+   *
+   * No need to drain response, response body is not valid
+   */
+  async makeRequestAsyncOmit(
+    options: RequestOption,
+    payload: Binary = '',
+    statusCodes: number[] = [200],
+    region = '',
+  ): Promise<Omit<http.IncomingMessage, 'on'>> {
+    return await this.makeRequestAsync(options, payload, statusCodes, region, false)
+  }
+
+  /**
+   * makeRequestStream will be used directly instead of makeRequest in case the payload
+   * is available as a stream. for ex. putObject
+   *
+   * @internal
+   */
+  async makeRequestStreamAsync(
+    options: RequestOption,
+    stream: stream.Readable | Buffer,
+    sha256sum: string,
+    statusCodes: number[] = [200],
+    region = '',
+    returnResponse = true,
+  ) {
+    if (!isObject(options)) {
+      throw new TypeError('options should be of type "object"')
+    }
+    if (!(Buffer.isBuffer(stream) || isReadableStream(stream))) {
+      throw new errors.InvalidArgumentError('stream should be a Buffer or readable Stream')
+    }
+    if (!isString(sha256sum)) {
+      throw new TypeError('sha256sum should be of type "string"')
+    }
+    statusCodes.forEach((statusCode) => {
+      if (!isNumber(statusCode)) {
+        throw new TypeError('statusCode should be of type "number"')
+      }
+    })
+    if (!isString(region)) {
+      throw new TypeError('region should be of type "string"')
+    }
+    if (!isBoolean(returnResponse)) {
+      throw new TypeError('returnResponse should be of type "boolean"')
+    }
+
+    // sha256sum will be empty for anonymous or https requests
+    if (!this.enableSHA256 && sha256sum.length !== 0) {
+      throw new errors.InvalidArgumentError(`sha256sum expected to be empty for anonymous or https requests`)
+    }
+    // sha256sum should be valid for non-anonymous http requests.
+    if (this.enableSHA256 && sha256sum.length !== 64) {
+      throw new errors.InvalidArgumentError(`Invalid sha256sum : ${sha256sum}`)
+    }
+
+    const regionPromise = region ? Promise.resolve(region) : this.getBucketRegionAsync(options.bucketName!)
+
+    void this.checkAndRefreshCreds()
+
+    return regionPromise.then(
+      (finalRegion) =>
+        new Promise<http.IncomingMessage>((resolve, reject) => {
+          options.region = finalRegion
+          const reqOptions = this.getRequestOptions(options)
+          if (!this.anonymous) {
+            // For non-anonymous https requests sha256sum is 'UNSIGNED-PAYLOAD' for signature calculation.
+            if (!this.enableSHA256) {
+              sha256sum = 'UNSIGNED-PAYLOAD'
+            }
+
+            const date = new Date()
+
+            reqOptions.headers['x-amz-date'] = makeDateLong(date)
+            reqOptions.headers['x-amz-content-sha256'] = sha256sum
+            if (this.sessionToken) {
+              reqOptions.headers['x-amz-security-token'] = this.sessionToken
+            }
+
+            reqOptions.headers.authorization = signV4(
+              reqOptions,
+              this.accessKey,
+              this.secretKey,
+              finalRegion,
+              date,
+              sha256sum,
+            )
+          }
+
+          const req = this.transport.request(reqOptions, (response) => {
+            if (!response.statusCode) {
+              return reject(new Error("BUG: response doesn't have a statusCode"))
+            }
+
+            if (!statusCodes.includes(response.statusCode)) {
+              // For an incorrect region, S3 server always sends back 400.
+              // But we will do cache invalidation for all errors so that,
+              // in future, if AWS S3 decides to send a different status code or
+              // XML error code we will still work fine.
+              delete this.regionMap[options.bucketName!]
+              // @ts-expect-error looks like `getErrorTransformer` wants a `http.ServerResponse`,
+              // but we only have a http.IncomingMessage here
+              const errorTransformer = transformers.getErrorTransformer(response)
+              pipesetup(response, errorTransformer).on('error', (e) => {
+                this.logHTTP(reqOptions, response, e)
+                reject(e)
+              })
+              return
+            }
+            this.logHTTP(reqOptions, response)
+            if (returnResponse) {
+              return resolve(response)
+            }
+            // We drain the socket so that the connection gets closed. Note that this
+            // is not expensive as the socket will not have any data.
+            drainResponse(response).then(() => resolve(response), reject)
+          })
+
+          req.on('error', (e) => {
+            this.logHTTP(reqOptions, null, e)
+            reject(e)
+          })
+
+          if (Buffer.isBuffer(stream)) {
+            req.end(stream)
+          } else {
+            pipesetup(stream, req)
+          }
+        }),
+    )
+  }
+
+  /// Bucket operations
+
+  /**
+   * Creates the bucket `bucketName`.
+   *
+   * @param bucketName - Name of the bucket
+   * @param region - region, see ts types for valid values, or use empty string.
+   * @param makeOpts - Options to create a bucket.
+   * @param callback - optional callback; if no callback is given, a promise is returned.
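+   *
+   * Usage sketch (bucket name and region are illustrative):
+   *
+   * @example
+   * await client.makeBucket('my-bucket', 'us-west-1', { ObjectLocking: true }) // promise style
+   * client.makeBucket('my-bucket', (err) => console.log(err ?? 'created'))    // callback style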
+   */
+  makeBucket(bucketName: string, region: Region, makeOpts: MakeBucketOpt, callback: NoResultCallback): void
+  makeBucket(bucketName: string, region: Region, callback: NoResultCallback): void
+  makeBucket(bucketName: string, callback: NoResultCallback): void
+  makeBucket(bucketName: string, region?: Region, makeOpts?: MakeBucketOpt): Promise<void>
+
+  // there is also a deprecated backward-compatibility signature:
+  // makeBucket(bucketName: string, makeOpts: MakeBucketOpt, callback: NoResultCallback): void
+
+  makeBucket(
+    bucketName: string,
+    regionOrCallback?: string | NoResultCallback | MakeBucketOpt, // MakeBucketOpt as second param is deprecated
+    makeOptsOrCallback?: MakeBucketOpt | NoResultCallback,
+    callback?: NoResultCallback,
+  ): void | Promise<void> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
+    }
+
+    let [[region = '', makeOpts = {}], cb] = findCallback<
+      [string, MakeBucketOpt] | [MakeBucketOpt, string],
+      NoResultCallback
+    >([regionOrCallback, makeOptsOrCallback, callback])
+    if (isObject(region)) {
+      // Backward Compatibility
+      // makeBucket(bucketName: string, makeOpts: MakeBucketOpt, callback: NoResultCallback): void
+      makeOpts = region
+      region = ''
+    }
+
+    if (!isString(region)) {
+      throw new TypeError('region should be of type "string"')
+    }
+    if (!isObject(makeOpts)) {
+      throw new TypeError('makeOpts should be of type "object"')
+    }
+
+    let payload = ''
+    // Region already set in constructor, validate if
+    // caller requested bucket location is same.
+    if (region && this.region) {
+      if (region !== this.region) {
+        throw new errors.InvalidArgumentError(`Configured region ${this.region}, requested ${region}`)
+      }
+    }
+    // sending makeBucket request with XML containing 'us-east-1' fails. For
+    // default region server expects the request without body
+    if (region && region !== DEFAULT_REGION) {
+      const builder = new xml2js.Builder({})
+
+      payload = builder.buildObject({
+        CreateBucketConfiguration: {
+          $: {
+            xmlns: 'http://s3.amazonaws.com/doc/2006-03-01/',
+          },
+          LocationConstraint: region,
+        },
+      })
+    }
+    const method = 'PUT'
+    const headers: RequestHeaders = {}
+    if (makeOpts.ObjectLocking) {
+      headers['x-amz-bucket-object-lock-enabled'] = true
+    }
+    if (!region) {
+      region = DEFAULT_REGION
+    }
+    const finalRegion = region // type narrow
+    const requestOpt: RequestOption = { method, bucketName, headers }
+    return asCallbackFn(cb, async () => {
+      try {
+        await this.makeRequestAsyncOmit(requestOpt, payload, [200], finalRegion)
+      } catch (err: unknown) {
+        if (region === '' || region === DEFAULT_REGION) {
+          if (err instanceof S3Error) {
+            const errCode = err.code
+            const errRegion = err.region
+            if (errCode === 'AuthorizationHeaderMalformed' && errRegion !== '') {
+              // Retry with region returned as part of error
+              await this.makeRequestAsyncOmit(requestOpt, payload, [200], errRegion)
+            }
+          }
+        }
+        throw err
+      }
+    })
+  }
+
+  /**
+   * List of buckets created.
+   */
+  listBuckets(): Promise<BucketItemFromList[]>
+  listBuckets(callback: ResultCallback<BucketItemFromList[]>): void
+  listBuckets(cb?: ResultCallback<BucketItemFromList[]>): void | Promise<BucketItemFromList[]> {
+    const method = 'GET'
+    return asCallbackFn(cb, async () => {
+      const response = await this.makeRequestAsync({ method }, '', [200], DEFAULT_REGION)
+      const body = await readAsBuffer(response)
+      return xmlParsers.parseListBucket(body.toString())
+    })
+  }
+
+  listIncompleteUploads(bucket: string, prefix: string, recursive: boolean): stream.Readable {
+    if (prefix === undefined) {
+      prefix = ''
+    }
+    if (recursive === undefined) {
+      recursive = false
+    }
+    if (!isValidBucketName(bucket)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucket)
+    }
+    if (!isValidPrefix(prefix)) {
+      throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`)
+    }
+    if (!isBoolean(recursive)) {
+      throw new TypeError('recursive should be of type "boolean"')
+    }
+    const delimiter = recursive ? '' : '/'
+    let keyMarker = ''
+    let uploadIdMarker = ''
+    const uploads: unknown[] = []
+    let ended = false
+    const readStream = new stream.Readable({ objectMode: true })
+    readStream._read = () => {
+      // push one upload info per _read()
+      if (uploads.length) {
+        return readStream.push(uploads.shift())
+      }
+      if (ended) {
+        return readStream.push(null)
+      }
+      this.listIncompleteUploadsQuery(bucket, prefix, keyMarker, uploadIdMarker, delimiter)
+        .on('error', (e) => readStream.emit('error', e))
+        .on('data', (result) => {
+          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+          // @ts-ignore
+          result.prefixes.forEach((prefix) => uploads.push(prefix))
+          async.eachSeries(
+            result.uploads,
+            (upload, cb) => {
+              // for each incomplete upload add the sizes of its uploaded parts
+              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+              // @ts-ignore
+              this.listParts(bucket, upload.key, upload.uploadId).then(
+                (parts: any) => {
+                  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+                  // @ts-ignore
+                  upload.size = parts.reduce((acc, item) => acc + item.size, 0)
+                  uploads.push(upload)
+                  cb()
+                },
+                (err: any) => cb(err),
+              )
+            },
+            (err) => {
+              if (err) {
+                readStream.emit('error', err)
+                return
+              }
+              if (result.isTruncated) {
+                keyMarker = result.nextKeyMarker
+                uploadIdMarker = result.nextUploadIdMarker
+              } else {
+                ended = true
+              }
+
+              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+              // @ts-ignore
+              readStream._read()
+            },
+          )
+        })
+    }
+    return readStream
+  }
+
+  /**
+   * Check if a bucket exists.
+   *
+   * @param bucketName - name of the bucket
+   */
+  bucketExists(bucketName: string, callback: ResultCallback<boolean>): void
+  bucketExists(bucketName: string): Promise<boolean>
+
+  // * `callback(err)` _function_ : `err` is `null` if the bucket exists
+  bucketExists(bucketName: string, cb?: ResultCallback<boolean>): void | Promise<boolean> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    const method = 'HEAD'
+
+    return asCallbackFn(cb, async () => {
+      try {
+        await this.makeRequestAsyncOmit({ method, bucketName }, '', [200], '')
+      } catch (err) {
+        if (err instanceof S3Error) {
+          if (err.code == 'NoSuchBucket' || err.code == 'NotFound') {
+            return false
+          }
+        }
+
+        throw err
+      }
+
+      return true
+    })
+  }
+
+  /**
+   * Remove a bucket
+   *
+   * @param bucketName - name of the bucket
+   * @param callback
+   */
+  removeBucket(bucketName: string, callback: NoResultCallback): void
+  removeBucket(bucketName: string): Promise<void>
+
+  // * `callback(err)` _function_ : `err` is `null` if the bucket is removed successfully.
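+  // Usage sketch (bucket name is illustrative):
+  //   await client.removeBucket('my-bucket')                      // promise style
+  //   client.removeBucket('my-bucket', (err) => console.log(err)) // callback style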
+  removeBucket(bucketName: string, cb?: NoResultCallback): void | Promise<void> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+
+    const method = 'DELETE'
+    return asCallbackFn(cb, async () => {
+      await this.makeRequestAsyncOmit({ method, bucketName }, '', [204], '')
+      delete this.regionMap[bucketName]
+    })
+  }
+
+  /**
+   * Remove the partially uploaded object.
+   *
+   * @param bucketName - name of the bucket
+   * @param objectName - name of the object
+   * @param callback - callback function is called with non `null` value in case of error
+   */
+  removeIncompleteUpload(bucketName: string, objectName: string, callback: NoResultCallback): void
+  removeIncompleteUpload(bucketName: string, objectName: string): Promise<void>
+
+  removeIncompleteUpload(bucketName: string, objectName: string, cb?: NoResultCallback): void | Promise<void> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+
+    return asCallbackFn(cb, async () => {
+      const uploadId = await this.findUploadId(bucketName, objectName)
+      if (!uploadId) {
+        return
+      }
+      const method = 'DELETE'
+      const query = `uploadId=${uploadId}`
+      await this.makeRequestAsync(
+        {
+          method,
+          bucketName,
+          objectName,
+          query,
+        },
+        '',
+        [204],
+        '',
+        false,
+      )
+    })
+  }
+
+  fGetObject(bucketName: string, objectName: string, filePath: string, callback: NoResultCallback): void
+  fGetObject(
+    bucketName: string,
+    objectName: string,
+    filePath: string,
+    getOpts: GetObjectOpt,
+    callback: NoResultCallback,
+  ): void
+  /**
+   * Callback is called with `error` in case of error or `null` in case of success
+   *
+   * @param bucketName - name of the bucket
+   * @param objectName - name of the object
+   * @param filePath - path to which the object data will be written to
+   * @param getOpts - optional object get options
+   */
+  fGetObject(bucketName: string, objectName: string, filePath: string, getOpts?: GetObjectOpt): Promise<void>
+
+  fGetObject(
+    bucketName: string,
+    objectName: string,
+    filePath: string,
+    getOptsOrCallback?: GetObjectOpt | NoResultCallback,
+    callback?: NoResultCallback,
+  ) {
+    // Input validation.
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+    if (!isString(filePath)) {
+      throw new TypeError('filePath should be of type "string"')
+    }
+
+    const [[getOpts = {}], cb] = findCallback<[GetObjectOpt], NoResultCallback>([getOptsOrCallback, callback])
+
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+
+    const executor = async (): Promise<string> => {
+      let partFileStream: stream.Writable
+      const objStat = await this.statObject(bucketName, objectName, getOpts)
+      const partFile = `${filePath}.${objStat.etag}.part.minio`
+
+      await fsp.mkdir(path.dirname(filePath), { recursive: true })
+
+      let offset = 0
+      try {
+        const stats = await fsp.stat(partFile)
+        if (objStat.size === stats.size) {
+          return partFile
+        }
+        offset = stats.size
+        partFileStream = fs.createWriteStream(partFile, { flags: 'a' })
+      } catch (e) {
+        if (e instanceof Error && (e as unknown as { code: string }).code === 'ENOENT') {
+          // file does not exist, create it
+          partFileStream = fs.createWriteStream(partFile, { flags: 'w' })
+        } else {
+          // other error, e.g. access denied
+          throw e
+        }
+      }
+
+      const downloadStream = await this.getPartialObject(bucketName, objectName, offset, 0, getOpts)
+
+      await streamPromise.pipeline(downloadStream, partFileStream)
+      const stats = await fsp.stat(partFile)
+      if (stats.size === objStat.size) {
+        return partFile
+      }
+
+      throw new Error('Size mismatch between downloaded file and the object')
+    }
+
+    return asCallback(
+      cb,
+      executor().then((partFile) => fsp.rename(partFile, filePath)),
+    )
+  }
+
+  getObject(
+    bucketName: string,
+    objectName: string,
+    getOpts: GetObjectOpt,
+    callback: ResultCallback<stream.Readable>,
+  ): void
+  getObject(bucketName: string, objectName: string, callback: ResultCallback<stream.Readable>): void
+
+  /**
+   * Get an object. Returns a readable stream of the object content, by callback or promise.
+   *
+   * @param bucketName - name of the bucket
+   * @param objectName - name of the object
+   * @param getOpts - optional object get options
+   */
+  getObject(bucketName: string, objectName: string, getOpts?: GetObjectOpt): Promise<stream.Readable>
+
+  getObject(
+    bucketName: string,
+    objectName: string,
+    getOpts_Callback?: GetObjectOpt | ResultCallback<stream.Readable>, // getOpts
+    callback?: ResultCallback<stream.Readable>, // callback
+  ): void | Promise<stream.Readable> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+
+    const [[getOpts = {}], cb] = findCallback<[GetObjectOpt], ResultCallback<stream.Readable>>([
+      getOpts_Callback,
+      callback,
+    ])
+
+    return asCallback(cb, this.getPartialObject(bucketName, objectName, 0, 0, getOpts))
+  }
+
+  /**
+   * Callback is called with readable stream of the partial object content.
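+   *
+   * Range sketch (offsets are illustrative): offset=10 with length=30 sends the
+   * header `range: bytes=10-39`; offset=10 with length=0 sends `range: bytes=10-`.
+   *
+   * @example
+   * const partial = await client.getPartialObject('my-bucket', 'my-object', 10, 30)
+   * partial.pipe(process.stdout)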
+   */
+  getPartialObject(
+    bucketName: string,
+    objectName: string,
+    offset: number,
+    length?: number,
+    getOpts?: GetObjectOpt,
+  ): Promise<stream.Readable>
+
+  getPartialObject(
+    bucketName: string,
+    objectName: string,
+    offset: number,
+    callback: ResultCallback<stream.Readable>,
+  ): void
+  getPartialObject(
+    bucketName: string,
+    objectName: string,
+    offset: number,
+    length: number,
+    callback: ResultCallback<stream.Readable>,
+  ): void
+  getPartialObject(
+    bucketName: string,
+    objectName: string,
+    offset: number,
+    length: number,
+    getOpts: GetObjectOpt,
+    callback: ResultCallback<stream.Readable>,
+  ): void
+
+  getPartialObject(
+    bucketName: string,
+    objectName: string,
+    offset: number,
+    length_callback?: number | ResultCallback<stream.Readable>, // length
+    getOpts_callback?: GetObjectOpt | ResultCallback<stream.Readable>, // get opt
+    callback?: ResultCallback<stream.Readable>, // callback
+  ): void | Promise<stream.Readable> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+    if (!isNumber(offset)) {
+      throw new TypeError('offset should be of type "number"')
+    }
+
+    const [[length = 0, getOpts = {}], cb] = findCallback<[number, GetObjectOpt], ResultCallback<stream.Readable>>([
+      length_callback,
+      getOpts_callback,
+      callback,
+    ])
+
+    if (!isNumber(length)) {
+      throw new TypeError(`length should be of type "number"`)
+    }
+
+    let range = ''
+    if (offset || length) {
+      if (offset) {
+        range = `bytes=${+offset}-`
+      } else {
+        range = 'bytes=0-'
+        offset = 0
+      }
+      if (length) {
+        range += `${+length + offset - 1}`
+      }
+    }
+
+    const headers: RequestHeaders = {}
+    if (range !== '') {
+      headers.range = range
+    }
+
+    const expectedStatusCodes = [200]
+    if (range) {
+      expectedStatusCodes.push(206)
+    }
+
+    const method = 'GET'
+    const query = queryString.stringify(getOpts)
+    return asCallback(
+      cb,
+      this.makeRequestAsync({ method, bucketName, objectName, headers, query }, '', expectedStatusCodes),
+    )
+  }
+
+  /**
+   * Uploads the object.
+   *
+   * Uploading a stream
+   * __Arguments__
+   * * `bucketName` _string_: name of the bucket
+   * * `objectName` _string_: name of the object
+   * * `stream` _Stream_: Readable stream
+   * * `size` _number_: size of the object (optional)
+   * * `callback(err, etag)` _function_: non null `err` indicates error, `etag` _string_ is the etag of the object uploaded.
+   *
+   * Uploading "Buffer" or "string"
+   * __Arguments__
+   * * `bucketName` _string_: name of the bucket
+   * * `objectName` _string_: name of the object
+   * * `string or Buffer` _string_ or _Buffer_: string or buffer
+   * * `callback(err, objInfo)` _function_: `err` is `null` in case of success and `info` will have the following object details:
+   * * `etag` _string_: etag of the object
+   * * `callback(err, objInfo)` _function_: non null `err` indicates error, `objInfo` _object_ which contains versionId and etag.
+   */
+  fPutObject(
+    bucketName: string,
+    objectName: string,
+    filePath: string,
+    metaDataOrCallback?: ObjectMetaData,
+    maybeCallback?: NoResultCallback,
+  ) {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+
+    if (!isString(filePath)) {
+      throw new TypeError('filePath should be of type "string"')
+    }
+
+    let [[metaData = {}], callback] = findCallback<[ObjectMetaData], NoResultCallback>([
+      metaDataOrCallback,
+      maybeCallback,
+    ])
+
+    if (!isObject(metaData)) {
+      throw new TypeError('metaData should be of type "object"')
+    }
+
+    // Inserts correct `content-type` attribute based on metaData and filePath
+    metaData = insertContentType(metaData, filePath)
+
+    // Updates metaData to have the correct prefix if needed
+    const headers = prependXAMZMeta(metaData)
+    const apiCallback = callback
+
+    type Part = {
+      part: number
+      etag: string
+    }
+
+    const executor = async (fd: fsp.FileHandle) => {
+      const stats = await fd.stat()
+      const fileSize = stats.size
+      if (fileSize > this.maxObjectSize) {
+        throw new Error(`${filePath} size : ${stats.size}, max allowed size: 5TB`)
+      }
+
+      if (fileSize <= this.partSize) {
+        // simple PUT request, no multipart
+        const uploader = this.getUploader(bucketName, objectName, headers, false)
+        const buf = await fd.readFile()
+        const { md5sum, sha256sum } = transformers.hashBinary(buf, this.enableSHA256)
+        return await uploader(buf, fileSize, sha256sum, md5sum)
+      }
+
+      const previousUploadId = await this.findUploadId(bucketName, objectName)
+      let eTags: Part[] = []
+      // if there was a previous incomplete upload, fetch all its uploaded parts info
+      let uploadId: string
+      if (previousUploadId) {
+        eTags = await this.listParts(bucketName, objectName, previousUploadId)
+        uploadId = previousUploadId
+      } else {
+        // there was no previous upload, initiate a new one
+        uploadId = await this.initiateNewMultipartUpload(bucketName, objectName, headers)
+      }
+
+      {
+        const partSize = this.calculatePartSize(fileSize)
+        const uploader = this.getUploader(bucketName, objectName, headers, true)
+        // convert array to object to make things easy
+        const parts = eTags.reduce(function (acc, item) {
+          if (!acc[item.part]) {
+            acc[item.part] = item
+          }
+          return acc
+        }, {} as Record<number, Part>)
+        const partsDone: { part: number; etag: string }[] = []
+        let partNumber = 1
+        let uploadedSize = 0
+
+        // will be reused for hashing and uploading
+        // don't worry it's "unsafe", we will read data from fs to fill it
+        const buf = Buffer.allocUnsafe(this.partSize)
+        while (uploadedSize < fileSize) {
+          const part = parts[partNumber]
+          let length = partSize
+          if (length > fileSize - uploadedSize) {
+            length = fileSize - uploadedSize
+          }
+
+          await fd.read(buf, 0, length, uploadedSize)
+          const { md5sum, sha256sum } = transformers.hashBinary(buf.subarray(0, length), this.enableSHA256)
+
+          const md5sumHex = Buffer.from(md5sum, 'base64').toString('hex')
+
+          if (part && md5sumHex === part.etag) {
+            // md5 matches, chunk already uploaded
+            partsDone.push({ part: partNumber, etag: part.etag })
+            partNumber++
+            uploadedSize += length
+            continue
+          }
+
+          const objInfo = await uploader(uploadId, partNumber, buf.subarray(0, length), length, sha256sum, md5sum)
+          partsDone.push({ part: partNumber, etag: objInfo.etag })
+          partNumber++
+          uploadedSize += length
+        }
+        eTags = partsDone
+      }
+
+      // at last, finish uploading
+      return this.completeMultipartUpload(bucketName, objectName, uploadId, eTags)
+    }
+
+    const ensureFileClose = async <T>(executor: (fd: fsp.FileHandle) => Promise<T>) => {
+      let fd
+      try {
+        fd = await fsp.open(filePath, 'r')
+      } catch (e) {
+        throw new Error(`failed to open file ${filePath}: err ${e}`, { cause: e })
+      }
+
+      try {
+        // make sure to keep await, otherwise file will be closed early.
+        return await executor(fd)
+      } finally {
+        await fd.close()
+      }
+    }
+
+    return asCallback(apiCallback, ensureFileClose(executor))
+  }
+
+  // ====================================================== //
+  // ====================================================== //
+  // ====================================================== //
+  // ====================================================== //
+  // ====================================================== //
+  // ====================================================== //
+  // ====================================================== //
+  // ====================================================== //
+  /* eslint-disable @typescript-eslint/ban-ts-comment */
+
+  completeMultipartUpload(
+    bucketName: string,
+    objectName: string,
+    uploadId: string,
+    etags: {
+      part: number
+      etag?: string
+    }[],
+  ): Promise<{ etag: string; versionId: string | null }>
+
+  completeMultipartUpload(
+    bucketName: string,
+    objectName: string,
+    uploadId: string,
+    etags: {
+      part: number
+      etag?: string
+    }[],
+    cb: ResultCallback<{ etag: string; versionId: string | null }>,
+  ): void
+
+  // this call will aggregate the parts on the server into a single object.
+  completeMultipartUpload(
+    bucketName: string,
+    objectName: string,
+    uploadId: string,
+    etags: {
+      part: number
+      etag?: string
+    }[],
+    cb?: ResultCallback<{ etag: string; versionId: string | null }>,
+  ) {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+    if (!isString(uploadId)) {
+      throw new TypeError('uploadId should be of type "string"')
+    }
+    if (!isObject(etags)) {
+      throw new TypeError('etags should be of type "Array"')
+    }
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('cb should be of type "function"')
+    }
+
+    if (!uploadId) {
+      throw new errors.InvalidArgumentError('uploadId cannot be empty')
+    }
+
+    const method = 'POST'
+    const query = `uploadId=${uriEscape(uploadId)}`
+
+    const builder = new xml2js.Builder()
+    const payload = builder.buildObject({
+      CompleteMultipartUpload: {
+        $: {
+          xmlns: 'http://s3.amazonaws.com/doc/2006-03-01/',
+        },
+        Part: etags.map((etag) => {
+          return {
+            PartNumber: etag.part,
+            ETag: etag.etag,
+          }
+        }),
+      },
+    })
+
+    return asCallbackFn(cb, async () => {
+      const res = await this.makeRequestAsync({ method, bucketName, objectName, query }, payload)
+      const body = await readAsBuffer(res)
+      const result = xmlParsers.parseCompleteMultipart(body.toString())
+      if (!result) {
+        throw new Error('BUG: failed to parse server response')
+      }
+
+      if (result.errCode) {
+        // Multipart Complete API returns an error XML after a 200 http status
+        throw new errors.S3Error(result.errMessage)
+      }
+
+      return {
+        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+        // @ts-ignore
+        etag: result.etag as string,
+        versionId: getVersionId(res.headers as ResponseHeader),
+      }
+    })
+  }
+
+  // Called by listIncompleteUploads to fetch a batch of incomplete uploads.
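+  // Query sketch (prefix and markers are illustrative): for prefix 'logs/' and
+  // delimiter '/' the resulting URL query is
+  //   uploads&delimiter=%2F&max-uploads=1000&prefix=logs%2F
+  // with key-marker / upload-id-marker appended when continuing a truncated listing.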
+  listIncompleteUploadsQuery(
+    bucketName: string,
+    prefix: string,
+    keyMarker: string,
+    uploadIdMarker: string,
+    delimiter: string,
+  ): stream.Transform {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isString(prefix)) {
+      throw new TypeError('prefix should be of type "string"')
+    }
+    if (!isString(keyMarker)) {
+      throw new TypeError('keyMarker should be of type "string"')
+    }
+    if (!isString(uploadIdMarker)) {
+      throw new TypeError('uploadIdMarker should be of type "string"')
+    }
+    if (!isString(delimiter)) {
+      throw new TypeError('delimiter should be of type "string"')
+    }
+    const queries = []
+    queries.push(`prefix=${uriEscape(prefix)}`)
+    queries.push(`delimiter=${uriEscape(delimiter)}`)
+
+    if (keyMarker) {
+      keyMarker = uriEscape(keyMarker)
+      queries.push(`key-marker=${keyMarker}`)
+    }
+    if (uploadIdMarker) {
+      queries.push(`upload-id-marker=${uploadIdMarker}`)
+    }
+
+    const maxUploads = 1000
+    queries.push(`max-uploads=${maxUploads}`)
+    queries.sort()
+    queries.unshift('uploads')
+    let query = ''
+    if (queries.length > 0) {
+      query = `${queries.join('&')}`
+    }
+    const method = 'GET'
+    const transformer = transformers.getListMultipartTransformer()
+    this.makeRequestAsync({ method, bucketName, query }, '', [200], '', true).then(
+      (response) => {
+        if (!response) {
+          throw new Error('BUG: no response')
+        }
+
+        pipesetup(response, transformer)
+      },
+      (e) => {
+        return transformer.emit('error', e)
+      },
+    )
+    return transformer
+  }
+
+  public get extensions() {
+    return this.clientExtensions
+  }
+
+  public async setCredentialsProvider(credentialsProvider: CredentialProvider) {
+    if (!(credentialsProvider instanceof CredentialProvider)) {
+      throw new Error('Unable to get credentials. Expected instance of CredentialProvider')
+    }
+    this.credentialsProvider = credentialsProvider
+    await this.checkAndRefreshCreds()
+  }
+
+  private async fetchCredentials() {
+    if (this.credentialsProvider) {
+      const credential = await this.credentialsProvider.getCredentials()
+      if (credential) {
+        this.accessKey = credential.getAccessKey()
+        this.secretKey = credential.getSecretKey()
+        this.sessionToken = credential.getSessionToken()
+      } else {
+        throw new Error(`Unable to get credentials. Expected instance of BaseCredentialsProvider, got ${credential}`)
+      }
+    } else {
+      throw new Error('Unable to get credentials. Expected instance of BaseCredentialsProvider')
+    }
+  }
+
+  /**
+   * Initiate a new multipart upload.
+   * @internal
+   */
+  async initiateNewMultipartUpload(bucketName: string, objectName: string, headers: RequestHeaders): Promise<string> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+    if (!isObject(headers)) {
+      throw new errors.InvalidObjectNameError('headers should be of type "object"')
+    }
+    const method = 'POST'
+    const query = 'uploads'
+    const res = await this.makeRequestAsync({ method, bucketName, objectName, query, headers })
+    const body = await readAsBuffer(res)
+    return xmlParsers.parseInitiateMultipart(body.toString())
+  }
+
+  // TODO: this method sometimes fails and causes an unhandled rejection error.
+  protected async checkAndRefreshCreds() {
+    if (this.credentialsProvider) {
+      return await this.fetchCredentials()
+    }
+  }
+
+  /**
+   * gets the region of the bucket
+   *
+   * @param bucketName
+   *
+   * @internal
+   */
+  protected async getBucketRegionAsync(bucketName: string): Promise<string> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError(`Invalid bucket name : ${bucketName}`)
+    }
+
+    const me = this
+
+    const executor = async (): Promise<string> => {
+      // Region is set with constructor, return the region right here.
+      if (this.region) {
+        return this.region
+      }
+
+      const cached = this.regionMap[bucketName]
+      if (cached) {
+        return cached
+      }
+
+      const extractRegionAsync = async (response: IncomingMessage) => {
+        const body = await readAsString(response)
+        const region = xmlParsers.parseBucketRegion(body)
+        this.regionMap[bucketName] = region
+        return region
+      }
+
+      const method = 'GET'
+      const query = 'location'
+
+      // `getBucketLocation` behaves differently in following ways for
+      // different environments.
+      //
+      // - For nodejs env we default to path style requests.
+      // - For browser env path style requests on buckets yields CORS
+      //   error. To circumvent this problem we make a virtual host
+      //   style request signed with 'us-east-1'. This request fails
+      //   with an error 'AuthorizationHeaderMalformed', additionally
+      //   the error XML also provides Region of the bucket. To validate
+      //   this region is proper we retry the same request with the newly
+      //   obtained region.
+      const pathStyle = this.pathStyle && !isBrowser
+
+      let region: string
+
+      try {
+        const res = await me.makeRequestAsync({ method, bucketName, query, pathStyle }, '', [200], DEFAULT_REGION)
+        return extractRegionAsync(res)
+      } catch (e) {
+        if (!(e instanceof Error && e.name === 'AuthorizationHeaderMalformed')) {
+          throw e
+        }
+        // @ts-expect-error we set extra properties on error object
+        region = e.Region as string
+        if (!region) {
+          throw e
+        }
+      }
+
+      const res = await me.makeRequestAsync({ method, bucketName, query, pathStyle }, '', [200], region)
+      return extractRegionAsync(res)
+    }
+
+    return executor()
+  }
+
+  findUploadId(bucketName: string, objectName: string, cb: ResultCallback<string | undefined>): void
+  findUploadId(bucketName: string, objectName: string): Promise<string | undefined>
+  findUploadId(
+    bucketName: string,
+    objectName: string,
+    cb?: ResultCallback<string | undefined>,
+  ): void | Promise<string | undefined> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('cb should be of type "function"')
+    }
+    return asCallback(
+      cb,
+      new Promise<string | undefined>((resolve, reject) => {
+        let latestUpload: string | undefined
+        const listNext = (keyMarker: string, uploadIdMarker: string) => {
+          this.listIncompleteUploadsQuery(bucketName, objectName, keyMarker, uploadIdMarker, '')
+            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+            // @ts-ignore
+            .on('error', (e) => reject(e))
+            .on('data', (result) => {
+              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+              // @ts-ignore
+              result.uploads.forEach((upload) => {
+                if (upload.key === objectName) {
+                  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+                  // @ts-ignore
+                  if (!latestUpload || upload.initiated.getTime() > latestUpload.initiated.getTime()) {
+                    latestUpload = upload
+                    return
+                  }
+                }
+              })
+              if (result.isTruncated) {
+                listNext(result.nextKeyMarker as string, result.nextUploadIdMarker as string)
+                return
+              }
+              if (latestUpload) {
+                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+                // @ts-ignore
+                return resolve(latestUpload.uploadId as string)
+              }
+              resolve(undefined)
+            })
+        }
+        listNext('', '')
+      }),
+    )
+  }
+
+  // Stat information of the object.
+  //
+  // __Arguments__
+  // * `bucketName` _string_: name of the bucket
+  // * `objectName` _string_: name of the object
+  // * `statOpts` _object_ : Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional).
+
+  statObject(
+    bucketName: string,
+    objectName: string,
+    statOpts: StatObjectOpts,
+    callback: ResultCallback<BucketItemStat>,
+  ): void
+  statObject(bucketName: string, objectName: string, callback: ResultCallback<BucketItemStat>): void
+  statObject(bucketName: string, objectName: string, statOpts?: StatObjectOpts): Promise<BucketItemStat>
+
+  statObject(
+    bucketName: string,
+    objectName: string,
+    statOptsOrCallback: StatObjectOpts | ResultCallback<BucketItemStat> = {},
+    callback?: ResultCallback<BucketItemStat>,
+  ): void | Promise<BucketItemStat> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+
+    let statOpts: StatObjectOpts = {}
+    let cb: ResultCallback<BucketItemStat> | undefined
+
+    // backward compatibility
+    if (typeof statOptsOrCallback === 'function') {
+      // statObject(bucketName, objectName, callback): void
+      statOpts = {}
+      cb = statOptsOrCallback
+    } else {
+      // statObject(bucketName, objectName, statOpts, callback): void
+      statOpts = statOptsOrCallback
+      cb = callback
+    }
+
+    if (!isObject(statOpts)) {
+      throw new errors.InvalidArgumentError('statOpts should be of type "object"')
+    }
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+
+    const query = querystring.stringify(statOpts)
+    const method = 'HEAD'
+    return asCallbackFn(cb, async () => {
+      const res = await this.makeRequestAsync({ method, bucketName, objectName, query })
+
+      // We drain the socket so that the connection gets closed. Note that this
+      // is not expensive as the socket will not have any data.
+      // A HEAD response is not expected to carry much of a body anyway.
+      await drainResponse(res)
+
+      const result: BucketItemStat = {
+        size: parseInt(res.headers['content-length'] as string),
+        metaData: extractMetadata(res.headers as ResponseHeader),
+        lastModified: new Date(res.headers['last-modified'] as string),
+        versionId: getVersionId(res.headers as ResponseHeader),
+        etag: sanitizeETag(res.headers.etag),
+      }
+
+      return result
+    })
+  }
+
+  getUploader(
+    bucketName: string,
+    objectName: string,
+    extraHeaders: RequestHeaders,
+    multipart: false,
+  ): (buf: Buffer, length: number, sha256sum: string, md5sum: string) => Promise<UploadedObjectInfo>
+  getUploader(
+    bucketName: string,
+    objectName: string,
+    extraHeaders: RequestHeaders,
+    multipart: true,
+  ): (
+    uploadId: string,
+    partNumber: number,
+    buf: Buffer,
+    length: number,
+    sha256sum: string,
+    md5sum: string,
+  ) => Promise<UploadedObjectInfo>
+
+  // returns a function that uploads either the whole object (multipart: false) or
+  // a part of the multipart.
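+  // Usage sketch (all values are illustrative):
+  //   const put = this.getUploader(bucketName, objectName, headers, false)
+  //   const { etag } = await put(buf, buf.length, sha256sum, md5sum)
+  //   const putPart = this.getUploader(bucketName, objectName, headers, true)
+  //   const { etag: partETag } = await putPart(uploadId, 1, buf, buf.length, sha256sum, md5sum)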
+  getUploader(bucketName: string, objectName: string, extraHeaders: RequestHeaders, multipart: boolean) {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+    if (!isBoolean(multipart)) {
+      throw new TypeError('multipart should be of type "boolean"')
+    }
+    if (!isObject(extraHeaders)) {
+      throw new TypeError('metadata should be of type "object"')
+    }
+
+    const validate = (stream: stream.Readable | Buffer, length: number, sha256sum: string, md5sum: string) => {
+      if (!(Buffer.isBuffer(stream) || isReadableStream(stream))) {
+        throw new TypeError('stream should be of type "Stream" or Buffer')
+      }
+      if (!isNumber(length)) {
+        throw new TypeError('length should be of type "number"')
+      }
+      if (!isString(sha256sum)) {
+        throw new TypeError('sha256sum should be of type "string"')
+      }
+      if (!isString(md5sum)) {
+        throw new TypeError('md5sum should be of type "string"')
+      }
+    }
+
+    const simpleUploader = (buf: Buffer, length: number, sha256sum: string, md5sum: string) => {
+      validate(buf, length, sha256sum, md5sum)
+      return upload('', buf, length, sha256sum, md5sum)
+    }
+
+    const multipartUploader = (
+      uploadId: string,
+      partNumber: number,
+      buf: Buffer,
+      length: number,
+      sha256sum: string,
+      md5sum: string,
+    ) => {
+      if (!isString(uploadId)) {
+        throw new TypeError('uploadId should be of type "string"')
+      }
+      if (!isNumber(partNumber)) {
+        throw new TypeError('partNumber should be of type "number"')
+      }
+      if (!uploadId) {
+        throw new errors.InvalidArgumentError('Empty uploadId')
+      }
+      if (!partNumber) {
+        throw new errors.InvalidArgumentError('partNumber cannot be 0')
+      }
+      validate(buf, length, sha256sum, md5sum)
+      const query = `partNumber=${partNumber}&uploadId=${uriEscape(uploadId)}`
+      return upload(query, buf, length, sha256sum, md5sum)
+    }
+
+    const upload = async (query: string, stream: Buffer, length: number, sha256sum: string, md5sum: string) => {
+      const method = 'PUT'
+      let headers: RequestHeaders = { 'Content-Length': length }
+
+      if (!multipart) {
+        headers = Object.assign({}, extraHeaders, headers)
+      }
+
+      if (!this.enableSHA256) {
+        headers['Content-MD5'] = md5sum
+      }
+
+      const response = await this.makeRequestStreamAsync(
+        {
+          method,
+          bucketName,
+          objectName,
+          query,
+          headers,
+        },
+        stream,
+        sha256sum,
+        [200],
+        '',
+        false,
+      )
+      return {
+        etag: sanitizeETag(response.headers.etag),
+        versionId: getVersionId(response.headers as ResponseHeader),
+      }
+    }
+    if (multipart) {
+      return multipartUploader
+    }
+    return simpleUploader
+  }
+
+  // Get part-info of all parts of an incomplete upload specified by uploadId.
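+  // Usage sketch (identifiers are illustrative):
+  //   const parts = await this.listParts('my-bucket', 'my-object', uploadId)
+  //   const uploadedBytes = parts.reduce((acc, p) => acc + p.size, 0)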
+  listParts(bucketName: string, objectName: string, uploadId: string): Promise<Part[]> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+    if (!isString(uploadId)) {
+      throw new TypeError('uploadId should be of type "string"')
+    }
+    if (!uploadId) {
+      throw new errors.InvalidArgumentError('uploadId cannot be empty')
+    }
+    return new Promise<Part[]>((resolve, reject) => {
+      let parts: Part[] = []
+      const listNext = (marker?: number) => {
+        this.listPartsQuery(bucketName, objectName, uploadId, marker)
+          .then((result) => {
+            parts = parts.concat(result.parts)
+            if (result.isTruncated) {
+              listNext(result.marker)
+              return
+            }
+            resolve(parts)
+          })
+          .catch((e) => reject(e))
+      }
+      listNext(0)
+    })
+  }
+
+  // Called by listParts to fetch a batch of part-info
+  async listPartsQuery(bucketName: string, objectName: string, uploadId: string, marker?: number) {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+    if (!isString(uploadId)) {
+      throw new TypeError('uploadId should be of type "string"')
+    }
+    if (!isNumber(marker)) {
+      throw new TypeError('marker should be of type "number"')
+    }
+    if (!uploadId) {
+      throw new errors.InvalidArgumentError('uploadId cannot be empty')
+    }
+    let query = ''
+    if (marker && marker !== 0) {
+      query += `part-number-marker=${marker}&`
+    }
+    query += `uploadId=${uriEscape(uploadId)}`
+
+    const method = 'GET'
+
+    const res = await this.makeRequestAsync({ method, bucketName, objectName, query })
+    const body = await readAsBuffer(res)
+    return xmlParsers.parseListParts(body.toString())
+  }
+}
+
+export async function uploadStream({
+  client,
+  bucketName,
+  objectName,
+  headers,
+  stream: source,
+  partSize,
+}: {
+  client: TypedBase
+  bucketName: string
+  objectName: string
+  headers: RequestHeaders
+  stream: stream.Readable
+  partSize: number
+}): Promise<UploadedObjectInfo> {
+  // A map of the previously uploaded chunks, for resuming a file upload. This
+  // will be null if we aren't resuming an upload.
+  const oldParts: Record<number, Part> = {}
+
+  // Keep track of the etags for aggregating the chunks together later. Each
+  // etag represents a single chunk of the file.
+  const eTags: Part[] = []
+
+  const previousUploadId = await client.findUploadId(bucketName, objectName)
+  let uploadId: string
+  if (!previousUploadId) {
+    uploadId = await client.initiateNewMultipartUpload(bucketName, objectName, headers)
+  } else {
+    uploadId = previousUploadId
+    const oldTags = await client.listParts(bucketName, objectName, previousUploadId)
+    oldTags.forEach((e) => {
+      oldParts[e.part] = e
+    })
+  }
+
+  const chunkier = new BlockStream2({ size: partSize, zeroPadding: false })
+
+  const [_, o] = await Promise.all([
+    new Promise((resolve, reject) => {
+      source.pipe(chunkier)
+      chunkier.on('end', resolve)
+      source.on('error', reject)
+      chunkier.on('error', reject)
+    }),
+    (async () => {
+      let partNumber = 1
+
+      for await (const chunk of chunkier) {
+        const md5 = crypto.createHash('md5').update(chunk).digest()
+
+        const oldPart = oldParts[partNumber]
+        if (oldPart) {
+          if (oldPart.etag === md5.toString('hex')) {
+            eTags.push({ part: partNumber, etag: oldPart.etag })
+            partNumber++
+            continue
+          }
+        }
+
+        // now start to upload the missing part
+        const options: RequestOption = {
+          method: 'PUT',
+          query: queryString.stringify({ partNumber, uploadId }),
+          headers: {
+            'Content-Length': chunk.length,
+            'Content-MD5': md5.toString('base64'),
+          },
+          bucketName,
+          objectName,
+        }
+
+        const response = await client.makeRequestAsyncOmit(options, chunk)
+
+        let etag = response.headers.etag
+        if (etag) {
+          etag = etag.replace(/^"/, '').replace(/"$/, '')
+        } else {
+          etag = ''
+        }
+
+        eTags.push({ part: partNumber, etag })
+        partNumber++
+      }
+
+      return await client.completeMultipartUpload(bucketName, objectName, uploadId, eTags)
+    })(),
+  ])
+
+  return o
+}
diff --git a/src/xml-parsers.js b/src/xml-parsers.js
deleted file mode 100644
index 96a66eed..00000000
--- a/src/xml-parsers.js
+++ /dev/null
@@ -1,561 +0,0 @@
-/*
- * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import crc32 from 'buffer-crc32'
-import { XMLParser } from 'fast-xml-parser'
-
-import * as errors from './errors.ts'
-import { SelectResults } from './helpers.ts'
-import {
-  isObject,
-  parseXml,
-  readableStream,
-  sanitizeETag,
-  sanitizeObjectKey,
-  sanitizeSize,
-  toArray,
-} from './internal/helper.ts'
-
-const fxpWithoutNumParser = new XMLParser({
-  numberParseOptions: {
-    skipLike: /./,
-  },
-})
-
-// parse XML response for copy object
-export function parseCopyObject(xml) {
-  var result = {
-    etag: '',
-    lastModified: '',
-  }
-
-  var xmlobj = parseXml(xml)
-  if (!xmlobj.CopyObjectResult) {
-    throw new errors.InvalidXMLError('Missing tag: "CopyObjectResult"')
-  }
-  xmlobj = xmlobj.CopyObjectResult
-  if (xmlobj.ETag) {
-    result.etag = xmlobj.ETag.replace(/^"/g, '')
-      .replace(/"$/g, '')
-      .replace(/^&quot;/g, '')
-      .replace(/&quot;$/g, '')
-      .replace(/^&#34;/g, '')
-      .replace(/&#34;$/g, '')
-  }
-  if (xmlobj.LastModified) {
-    result.lastModified = new Date(xmlobj.LastModified)
-  }
-
-  return result
-}
-
-// parse XML response for listing in-progress multipart uploads
-export function parseListMultipart(xml) {
-  var result = {
-    uploads: [],
-    prefixes: [],
-    isTruncated: false,
-  }
-
-  var xmlobj = parseXml(xml)
-
-  if (!xmlobj.ListMultipartUploadsResult) {
-    throw new errors.InvalidXMLError('Missing tag: "ListMultipartUploadsResult"')
-  }
-  xmlobj = xmlobj.ListMultipartUploadsResult
-  if (xmlobj.IsTruncated) {
-    result.isTruncated = xmlobj.IsTruncated
-  }
-  if (xmlobj.NextKeyMarker) {
-    result.nextKeyMarker = xmlobj.NextKeyMarker
-  }
-  if (xmlobj.NextUploadIdMarker) {
-    result.nextUploadIdMarker = xmlobj.nextUploadIdMarker || ''
-  }
-
-  if (xmlobj.CommonPrefixes) {
-    toArray(xmlobj.CommonPrefixes).forEach((prefix) => {
-      result.prefixes.push({ prefix: sanitizeObjectKey(toArray(prefix.Prefix)[0]) })
-    })
-  }
-
-  if (xmlobj.Upload) {
-    toArray(xmlobj.Upload).forEach((upload) => {
-      var key = upload.Key
-      var uploadId = upload.UploadId
-      var initiator = { id: upload.Initiator.ID, displayName: upload.Initiator.DisplayName }
-      var owner = { id: upload.Owner.ID, displayName: upload.Owner.DisplayName }
-      var storageClass = upload.StorageClass
-      var initiated = new Date(upload.Initiated)
-      result.uploads.push({ key, uploadId, initiator, owner, storageClass, initiated })
-    })
-  }
-  return result
-}
-
-// parse XML response to list all the owned buckets
-
-// parse XML response for bucket notification
-export function parseBucketNotification(xml) {
-  var result = {
-    TopicConfiguration: [],
-    QueueConfiguration: [],
-    CloudFunctionConfiguration: [],
-  }
-  // Parse the events list
-  var genEvents = function (events) {
-    var result = []
-    if (events) {
-      toArray(events).forEach((s3event) => {
-        result.push(s3event)
-      })
-    }
-    return result
-  }
-  // Parse all filter rules
-  var genFilterRules = function (filters) {
-    var result = []
-    if (filters) {
-      filters = toArray(filters)
-      if (filters[0].S3Key) {
-        filters[0].S3Key = toArray(filters[0].S3Key)
-        if (filters[0].S3Key[0].FilterRule) {
-          toArray(filters[0].S3Key[0].FilterRule).forEach((rule) => {
-            var Name = toArray(rule.Name)[0]
-            var Value = toArray(rule.Value)[0]
-            result.push({ Name, Value })
-          })
-        }
-      }
-    }
-    return result
-  }
-
-  var xmlobj = parseXml(xml)
-  xmlobj = xmlobj.NotificationConfiguration
-
-  // Parse all topic configurations in the xml
-  if (xmlobj.TopicConfiguration) {
-    toArray(xmlobj.TopicConfiguration).forEach((config) => {
-      var Id = toArray(config.Id)[0]
-      var Topic = toArray(config.Topic)[0]
genEvents(config.Event) - var Filter = genFilterRules(config.Filter) - result.TopicConfiguration.push({ Id, Topic, Event, Filter }) - }) - } - // Parse all topic configurations in the xml - if (xmlobj.QueueConfiguration) { - toArray(xmlobj.QueueConfiguration).forEach((config) => { - var Id = toArray(config.Id)[0] - var Queue = toArray(config.Queue)[0] - var Event = genEvents(config.Event) - var Filter = genFilterRules(config.Filter) - result.QueueConfiguration.push({ Id, Queue, Event, Filter }) - }) - } - // Parse all QueueConfiguration arrays - if (xmlobj.CloudFunctionConfiguration) { - toArray(xmlobj.CloudFunctionConfiguration).forEach((config) => { - var Id = toArray(config.Id)[0] - var CloudFunction = toArray(config.CloudFunction)[0] - var Event = genEvents(config.Event) - var Filter = genFilterRules(config.Filter) - result.CloudFunctionConfiguration.push({ Id, CloudFunction, Event, Filter }) - }) - } - - return result -} - -// parse XML response when a multipart upload is completed -export function parseCompleteMultipart(xml) { - var xmlobj = parseXml(xml).CompleteMultipartUploadResult - if (xmlobj.Location) { - var location = toArray(xmlobj.Location)[0] - var bucket = toArray(xmlobj.Bucket)[0] - var key = xmlobj.Key - var etag = xmlobj.ETag.replace(/^"/g, '') - .replace(/"$/g, '') - .replace(/^"/g, '') - .replace(/"$/g, '') - .replace(/^"/g, '') - .replace(/"$/g, '') - - return { location, bucket, key, etag } - } - // Complete Multipart can return XML Error after a 200 OK response - if (xmlobj.Code && xmlobj.Message) { - var errCode = toArray(xmlobj.Code)[0] - var errMessage = toArray(xmlobj.Message)[0] - return { errCode, errMessage } - } -} - -const formatObjInfo = (content, opts = {}) => { - let { Key, LastModified, ETag, Size, VersionId, IsLatest } = content - - if (!isObject(opts)) { - opts = {} - } - - const name = sanitizeObjectKey(toArray(Key)[0]) - const lastModified = new Date(toArray(LastModified)[0]) - const etag = sanitizeETag(toArray(ETag)[0]) - const size = sanitizeSize(Size) - - return { - name, - lastModified, - etag, - size, - versionId: VersionId, - isLatest: IsLatest, - isDeleteMarker: opts.IsDeleteMarker ? 
opts.IsDeleteMarker : false, - } -} - -// parse XML response for list objects in a bucket -export function parseListObjects(xml) { - var result = { - objects: [], - isTruncated: false, - } - let isTruncated = false - let nextMarker, nextVersionKeyMarker - const xmlobj = fxpWithoutNumParser.parse(xml) - - const parseCommonPrefixesEntity = (responseEntity) => { - if (responseEntity) { - toArray(responseEntity).forEach((commonPrefix) => { - result.objects.push({ prefix: sanitizeObjectKey(toArray(commonPrefix.Prefix)[0]), size: 0 }) - }) - } - } - - const listBucketResult = xmlobj.ListBucketResult - const listVersionsResult = xmlobj.ListVersionsResult - - if (listBucketResult) { - if (listBucketResult.IsTruncated) { - isTruncated = listBucketResult.IsTruncated - } - if (listBucketResult.Contents) { - toArray(listBucketResult.Contents).forEach((content) => { - const name = sanitizeObjectKey(toArray(content.Key)[0]) - const lastModified = new Date(toArray(content.LastModified)[0]) - const etag = sanitizeETag(toArray(content.ETag)[0]) - const size = sanitizeSize(content.Size) - result.objects.push({ name, lastModified, etag, size }) - }) - } - - if (listBucketResult.NextMarker) { - nextMarker = listBucketResult.NextMarker - } - parseCommonPrefixesEntity(listBucketResult.CommonPrefixes) - } - - if (listVersionsResult) { - if (listVersionsResult.IsTruncated) { - isTruncated = listVersionsResult.IsTruncated - } - - if (listVersionsResult.Version) { - toArray(listVersionsResult.Version).forEach((content) => { - result.objects.push(formatObjInfo(content)) - }) - } - if (listVersionsResult.DeleteMarker) { - toArray(listVersionsResult.DeleteMarker).forEach((content) => { - result.objects.push(formatObjInfo(content, { IsDeleteMarker: true })) - }) - } - - if (listVersionsResult.NextKeyMarker) { - nextVersionKeyMarker = listVersionsResult.NextKeyMarker - } - if (listVersionsResult.NextVersionIdMarker) { - result.versionIdMarker = listVersionsResult.NextVersionIdMarker - } - parseCommonPrefixesEntity(listVersionsResult.CommonPrefixes) - } - - result.isTruncated = isTruncated - if (isTruncated) { - result.nextMarker = nextVersionKeyMarker || nextMarker - } - return result -} - -// parse XML response for list objects v2 in a bucket -export function parseListObjectsV2(xml) { - var result = { - objects: [], - isTruncated: false, - } - var xmlobj = parseXml(xml) - if (!xmlobj.ListBucketResult) { - throw new errors.InvalidXMLError('Missing tag: "ListBucketResult"') - } - xmlobj = xmlobj.ListBucketResult - if (xmlobj.IsTruncated) { - result.isTruncated = xmlobj.IsTruncated - } - if (xmlobj.NextContinuationToken) { - result.nextContinuationToken = xmlobj.NextContinuationToken - } - if (xmlobj.Contents) { - toArray(xmlobj.Contents).forEach((content) => { - var name = sanitizeObjectKey(toArray(content.Key)[0]) - var lastModified = new Date(content.LastModified) - var etag = sanitizeETag(content.ETag) - var size = content.Size - result.objects.push({ name, lastModified, etag, size }) - }) - } - if (xmlobj.CommonPrefixes) { - toArray(xmlobj.CommonPrefixes).forEach((commonPrefix) => { - result.objects.push({ prefix: sanitizeObjectKey(toArray(commonPrefix.Prefix)[0]), size: 0 }) - }) - } - return result -} - -// parse XML response for list objects v2 with metadata in a bucket -export function parseListObjectsV2WithMetadata(xml) { - var result = { - objects: [], - isTruncated: false, - } - var xmlobj = parseXml(xml) - if (!xmlobj.ListBucketResult) { - throw new errors.InvalidXMLError('Missing tag: "ListBucketResult"') - 
} - xmlobj = xmlobj.ListBucketResult - if (xmlobj.IsTruncated) { - result.isTruncated = xmlobj.IsTruncated - } - if (xmlobj.NextContinuationToken) { - result.nextContinuationToken = xmlobj.NextContinuationToken - } - - if (xmlobj.Contents) { - toArray(xmlobj.Contents).forEach((content) => { - var name = sanitizeObjectKey(content.Key) - var lastModified = new Date(content.LastModified) - var etag = sanitizeETag(content.ETag) - var size = content.Size - var metadata - if (content.UserMetadata != null) { - metadata = toArray(content.UserMetadata)[0] - } else { - metadata = null - } - result.objects.push({ name, lastModified, etag, size, metadata }) - }) - } - - if (xmlobj.CommonPrefixes) { - toArray(xmlobj.CommonPrefixes).forEach((commonPrefix) => { - result.objects.push({ prefix: sanitizeObjectKey(toArray(commonPrefix.Prefix)[0]), size: 0 }) - }) - } - return result -} - -export function parseBucketVersioningConfig(xml) { - var xmlObj = parseXml(xml) - return xmlObj.VersioningConfiguration -} -export function parseLifecycleConfig(xml) { - const xmlObj = parseXml(xml) - return xmlObj.LifecycleConfiguration -} -export function parseObjectRetentionConfig(xml) { - const xmlObj = parseXml(xml) - const retentionConfig = xmlObj.Retention - - return { - mode: retentionConfig.Mode, - retainUntilDate: retentionConfig.RetainUntilDate, - } -} - -export function parseBucketEncryptionConfig(xml) { - let encConfig = parseXml(xml) - return encConfig -} - -export function parseObjectLegalHoldConfig(xml) { - const xmlObj = parseXml(xml) - return xmlObj.LegalHold -} - -export function uploadPartParser(xml) { - const xmlObj = parseXml(xml) - const respEl = xmlObj.CopyPartResult - return respEl -} - -export function removeObjectsParser(xml) { - const xmlObj = parseXml(xml) - if (xmlObj.DeleteResult && xmlObj.DeleteResult.Error) { - // return errors as array always. as the response is object in case of single object passed in removeObjects - return toArray(xmlObj.DeleteResult.Error) - } - return [] -} - -export function parseSelectObjectContentResponse(res) { - // extractHeaderType extracts the first half of the header message, the header type. - function extractHeaderType(stream) { - const headerNameLen = Buffer.from(stream.read(1)).readUInt8() - const headerNameWithSeparator = Buffer.from(stream.read(headerNameLen)).toString() - const splitBySeparator = (headerNameWithSeparator || '').split(':') - const headerName = splitBySeparator.length >= 1 ? splitBySeparator[1] : '' - return headerName - } - - function extractHeaderValue(stream) { - const bodyLen = Buffer.from(stream.read(2)).readUInt16BE() - const bodyName = Buffer.from(stream.read(bodyLen)).toString() - return bodyName - } - - const selectResults = new SelectResults({}) // will be returned - - const responseStream = readableStream(res) // convert byte array to a readable responseStream - while (responseStream._readableState.length) { - // Top level responseStream read tracker. - let msgCrcAccumulator // accumulate from start of the message till the message crc start. - - const totalByteLengthBuffer = Buffer.from(responseStream.read(4)) - msgCrcAccumulator = crc32(totalByteLengthBuffer) - - const headerBytesBuffer = Buffer.from(responseStream.read(4)) - msgCrcAccumulator = crc32(headerBytesBuffer, msgCrcAccumulator) - - const calculatedPreludeCrc = msgCrcAccumulator.readInt32BE() // use it to check if any CRC mismatch in header itself. 
- - const preludeCrcBuffer = Buffer.from(responseStream.read(4)) // read 4 bytes i.e 4+4 =8 + 4 = 12 ( prelude + prelude crc) - msgCrcAccumulator = crc32(preludeCrcBuffer, msgCrcAccumulator) - - const totalMsgLength = totalByteLengthBuffer.readInt32BE() - const headerLength = headerBytesBuffer.readInt32BE() - const preludeCrcByteValue = preludeCrcBuffer.readInt32BE() - - if (preludeCrcByteValue !== calculatedPreludeCrc) { - // Handle Header CRC mismatch Error - throw new Error( - `Header Checksum Mismatch, Prelude CRC of ${preludeCrcByteValue} does not equal expected CRC of ${calculatedPreludeCrc}`, - ) - } - - const headers = {} - if (headerLength > 0) { - const headerBytes = Buffer.from(responseStream.read(headerLength)) - msgCrcAccumulator = crc32(headerBytes, msgCrcAccumulator) - const headerReaderStream = readableStream(headerBytes) - while (headerReaderStream._readableState.length) { - let headerTypeName = extractHeaderType(headerReaderStream) - headerReaderStream.read(1) // just read and ignore it. - headers[headerTypeName] = extractHeaderValue(headerReaderStream) - } - } - - let payloadStream - const payLoadLength = totalMsgLength - headerLength - 16 - if (payLoadLength > 0) { - const payLoadBuffer = Buffer.from(responseStream.read(payLoadLength)) - msgCrcAccumulator = crc32(payLoadBuffer, msgCrcAccumulator) - // read the checksum early and detect any mismatch so we can avoid unnecessary further processing. - const messageCrcByteValue = Buffer.from(responseStream.read(4)).readInt32BE() - const calculatedCrc = msgCrcAccumulator.readInt32BE() - // Handle message CRC Error - if (messageCrcByteValue !== calculatedCrc) { - throw new Error( - `Message Checksum Mismatch, Message CRC of ${messageCrcByteValue} does not equal expected CRC of ${calculatedCrc}`, - ) - } - payloadStream = readableStream(payLoadBuffer) - } - - const messageType = headers['message-type'] - - switch (messageType) { - case 'error': { - const errorMessage = headers['error-code'] + ':"' + headers['error-message'] + '"' - throw new Error(errorMessage) - } - case 'event': { - const contentType = headers['content-type'] - const eventType = headers['event-type'] - - switch (eventType) { - case 'End': { - selectResults.setResponse(res) - return selectResults - } - - case 'Records': { - const readData = payloadStream.read(payLoadLength) - selectResults.setRecords(readData) - break - } - - case 'Progress': - { - switch (contentType) { - case 'text/xml': { - const progressData = payloadStream.read(payLoadLength) - selectResults.setProgress(progressData.toString()) - break - } - default: { - const errorMessage = `Unexpected content-type ${contentType} sent for event-type Progress` - throw new Error(errorMessage) - } - } - } - break - case 'Stats': - { - switch (contentType) { - case 'text/xml': { - const statsData = payloadStream.read(payLoadLength) - selectResults.setStats(statsData.toString()) - break - } - default: { - const errorMessage = `Unexpected content-type ${contentType} sent for event-type Stats` - throw new Error(errorMessage) - } - } - } - break - default: { - // Continuation message: Not sure if it is supported. did not find a reference or any message in response. - // It does not have a payload. 
- const warningMessage = `Un implemented event detected ${messageType}.` - // eslint-disable-next-line no-console - console.warn(warningMessage) - } - } // eventType End - } // Event End - } // messageType End - } // Top Level Stream End -} diff --git a/src/xml-parsers.ts b/src/xml-parsers.ts new file mode 100644 index 00000000..b28ece35 --- /dev/null +++ b/src/xml-parsers.ts @@ -0,0 +1,841 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import * as crc32 from 'crc-32' +import { XMLParser } from 'fast-xml-parser' + +import * as errors from './errors.ts' +import type { RETENTION_MODES } from './helpers.ts' +import { SelectResults } from './helpers.ts' +import { isObject, parseXml, sanitizeETag, sanitizeObjectKey, sanitizeSize, toArray } from './internal/helper.ts' +import type { BucketItemCopy, BucketItemFromList, ObjectMetaData, Retention, UploadID } from './internal/type.ts' +import { RETENTION_VALIDITY_UNITS } from './internal/type.ts' + +const fxp = new XMLParser() + +// Parse XML and return information as Javascript types +// parse error XML response +export function parseError(xml: string, headerInfo: Record) { + let xmlErr = {} + const xmlObj = fxp.parse(xml) + if (xmlObj.Error) { + xmlErr = xmlObj.Error + } + + const e = new errors.S3Error() as unknown as Record + Object.entries(xmlErr).forEach(([key, value]) => { + e[key.toLowerCase()] = value + }) + + Object.entries(headerInfo).forEach(([key, value]) => { + e[key] = value + }) + + return e +} + +// parse XML response for copy object +export function parseCopyObject(xml: string): BucketItemCopy { + const result: { etag: string; lastModified?: Date } = { + etag: '', + } + + let xmlobj = parseXml(xml) + if (!xmlobj.CopyObjectResult) { + throw new errors.InvalidXMLError('Missing tag: "CopyObjectResult"') + } + xmlobj = xmlobj.CopyObjectResult + if (xmlobj.ETag) { + result.etag = xmlobj.ETag.replace(/^"/g, '') + .replace(/"$/g, '') + .replace(/^"/g, '') + .replace(/"$/g, '') + .replace(/^"/g, '') + .replace(/"$/g, '') + } + if (xmlobj.LastModified) { + result.lastModified = new Date(xmlobj.LastModified) + } + + // @ts-ignore + return result +} + +// parse XML response for listing in-progress multipart uploads +export function parseListMultipart(xml: string) { + const result = { + uploads: [] as { + key: string + uploadId: UploadID + initiator: unknown + owner: unknown + storageClass: unknown + initiated: unknown + }[], + prefixes: [] as { prefix: string }[], + isTruncated: false, + nextKeyMarker: undefined, + nextUploadIdMarker: undefined, + } + + let xmlobj = parseXml(xml) + + if (!xmlobj.ListMultipartUploadsResult) { + throw new errors.InvalidXMLError('Missing tag: "ListMultipartUploadsResult"') + } + xmlobj = xmlobj.ListMultipartUploadsResult + if (xmlobj.IsTruncated) { + result.isTruncated = xmlobj.IsTruncated + } + if (xmlobj.NextKeyMarker) { + result.nextKeyMarker = xmlobj.NextKeyMarker + } + if 
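The rewritten parser below swaps buffer-crc32 for crc-32, which changes the accumulation style: buffer-crc32 threads a Buffer through successive calls, while crc-32's buf(data, seed) takes and returns a plain signed 32-bit integer. A small sketch of the incremental pattern the new code relies on (checksumAll is an illustrative helper, not library code):

import * as crc32 from 'crc-32'

// CRC32 over several buffers, threading the running value through as the
// seed; the result equals the CRC32 of the concatenated bytes and can be
// compared directly against Buffer.readInt32BE().
function checksumAll(buffers: Buffer[]): number {
  let acc = 0
  for (const buf of buffers) {
    acc = crc32.buf(buf, acc)
  }
  return acc
}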
diff --git a/src/xml-parsers.ts b/src/xml-parsers.ts
new file mode 100644
index 00000000..b28ece35
--- /dev/null
+++ b/src/xml-parsers.ts
@@ -0,0 +1,841 @@
+/*
+ * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import * as crc32 from 'crc-32'
+import { XMLParser } from 'fast-xml-parser'
+
+import * as errors from './errors.ts'
+import type { RETENTION_MODES } from './helpers.ts'
+import { SelectResults } from './helpers.ts'
+import { isObject, parseXml, sanitizeETag, sanitizeObjectKey, sanitizeSize, toArray } from './internal/helper.ts'
+import type { BucketItemCopy, BucketItemFromList, ObjectMetaData, Retention, UploadID } from './internal/type.ts'
+import { RETENTION_VALIDITY_UNITS } from './internal/type.ts'
+
+const fxp = new XMLParser()
+
+// Parse XML and return information as Javascript types
+// parse error XML response
+export function parseError(xml: string, headerInfo: Record<string, unknown>) {
+  let xmlErr = {}
+  const xmlObj = fxp.parse(xml)
+  if (xmlObj.Error) {
+    xmlErr = xmlObj.Error
+  }
+
+  const e = new errors.S3Error() as unknown as Record<string, unknown>
+  Object.entries(xmlErr).forEach(([key, value]) => {
+    e[key.toLowerCase()] = value
+  })
+
+  Object.entries(headerInfo).forEach(([key, value]) => {
+    e[key] = value
+  })
+
+  return e
+}
+
+// parse XML response for copy object
+export function parseCopyObject(xml: string): BucketItemCopy {
+  const result: { etag: string; lastModified?: Date } = {
+    etag: '',
+  }
+
+  let xmlobj = parseXml(xml)
+  if (!xmlobj.CopyObjectResult) {
+    throw new errors.InvalidXMLError('Missing tag: "CopyObjectResult"')
+  }
+  xmlobj = xmlobj.CopyObjectResult
+  if (xmlobj.ETag) {
+    result.etag = xmlobj.ETag.replace(/^"/g, '')
+      .replace(/"$/g, '')
+      .replace(/^"/g, '')
+      .replace(/"$/g, '')
+      .replace(/^"/g, '')
+      .replace(/"$/g, '')
+  }
+  if (xmlobj.LastModified) {
+    result.lastModified = new Date(xmlobj.LastModified)
+  }
+
+  // @ts-ignore
+  return result
+}
+
+// parse XML response for listing in-progress multipart uploads
+export function parseListMultipart(xml: string) {
+  const result = {
+    uploads: [] as {
+      key: string
+      uploadId: UploadID
+      initiator: unknown
+      owner: unknown
+      storageClass: unknown
+      initiated: unknown
+    }[],
+    prefixes: [] as { prefix: string }[],
+    isTruncated: false,
+    nextKeyMarker: undefined,
+    nextUploadIdMarker: undefined,
+  }
+
+  let xmlobj = parseXml(xml)
+
+  if (!xmlobj.ListMultipartUploadsResult) {
+    throw new errors.InvalidXMLError('Missing tag: "ListMultipartUploadsResult"')
+  }
+  xmlobj = xmlobj.ListMultipartUploadsResult
+  if (xmlobj.IsTruncated) {
+    result.isTruncated = xmlobj.IsTruncated
+  }
+  if (xmlobj.NextKeyMarker) {
+    result.nextKeyMarker = xmlobj.NextKeyMarker
+  }
+  if (xmlobj.NextUploadIdMarker) {
+    result.nextUploadIdMarker = xmlobj.NextUploadIdMarker || ''
+  }
+
+  if (xmlobj.CommonPrefixes) {
+    toArray(xmlobj.CommonPrefixes).forEach((prefix) => {
+      // @ts-expect-error index check
+      result.prefixes.push({ prefix: sanitizeObjectKey(toArray(prefix.Prefix)[0]) })
+    })
+  }
+
+  if (xmlobj.Upload) {
+    toArray(xmlobj.Upload).forEach((upload) => {
+      const key = upload.Key
+      const uploadId = upload.UploadId
+      const initiator = { id: upload.Initiator.ID, displayName: upload.Initiator.DisplayName }
+      const owner = { id: upload.Owner.ID, displayName: upload.Owner.DisplayName }
+      const storageClass = upload.StorageClass
+      const initiated = new Date(upload.Initiated)
+      result.uploads.push({ key, uploadId, initiator, owner, storageClass, initiated })
+    })
+  }
+  return result
+}
+
+// parse XML response to list all the owned buckets
+export function parseListBucket(xml: string): BucketItemFromList[] {
+  const result: BucketItemFromList[] = []
+  let xmlobj = parseXml(xml)
+
+  if (!xmlobj.ListAllMyBucketsResult) {
+    throw new errors.InvalidXMLError('Missing tag: "ListAllMyBucketsResult"')
+  }
+  xmlobj = xmlobj.ListAllMyBucketsResult
+
+  if (xmlobj.Buckets) {
+    if (xmlobj.Buckets.Bucket) {
+      toArray(xmlobj.Buckets.Bucket).forEach((bucket) => {
+        const name = bucket.Name
+        const creationDate = new Date(bucket.CreationDate)
+        result.push({ name, creationDate })
+      })
+    }
+  }
+  return result
+}
+
+// parse XML response for bucket notification
+export function parseBucketNotification(xml: string): any {
+  const result = {
+    TopicConfiguration: [] as unknown[],
+    QueueConfiguration: [] as unknown[],
+    CloudFunctionConfiguration: [] as unknown[],
+  }
+  // Parse the events list
+  const genEvents = function (events: any) {
+    const result = []
+    if (events) {
+      result.push(...toArray(events))
+    }
+    return result
+  }
+  // Parse all filter rules
+  const genFilterRules = function (filters: any) {
+    const result: { Name: string; Value: string }[] = []
+    if (filters) {
+      filters = toArray(filters)
+      if (filters[0].S3Key) {
+        filters[0].S3Key = toArray(filters[0].S3Key)
+        if (filters[0].S3Key[0].FilterRule) {
+          toArray(filters[0].S3Key[0].FilterRule).forEach((rule) => {
+            const Name = toArray(rule.Name)[0]
+            const Value = toArray(rule.Value)[0]
+            result.push({ Name, Value })
+          })
+        }
+      }
+    }
+    return result
+  }
+
+  let xmlobj = parseXml(xml)
+  xmlobj = xmlobj.NotificationConfiguration
+
+  // Parse all topic configurations in the xml
+  if (xmlobj.TopicConfiguration) {
+    toArray(xmlobj.TopicConfiguration).forEach((config) => {
+      const Id = toArray(config.Id)[0]
+      const Topic = toArray(config.Topic)[0]
+      const Event = genEvents(config.Event)
+      const Filter = genFilterRules(config.Filter)
+      result.TopicConfiguration.push({ Id, Topic, Event, Filter })
+    })
+  }
+  // Parse all queue configurations in the xml
+  if (xmlobj.QueueConfiguration) {
+    toArray(xmlobj.QueueConfiguration).forEach((config) => {
+      const Id = toArray(config.Id)[0]
+      const Queue = toArray(config.Queue)[0]
+      const Event = genEvents(config.Event)
+      const Filter = genFilterRules(config.Filter)
+      result.QueueConfiguration.push({ Id, Queue, Event, Filter })
+    })
+  }
+  // Parse all cloud function configurations in the xml
+  if (xmlobj.CloudFunctionConfiguration) {
+    toArray(xmlobj.CloudFunctionConfiguration).forEach((config) => {
+      const Id = toArray(config.Id)[0]
+      const CloudFunction = toArray(config.CloudFunction)[0]
+      const Event = genEvents(config.Event)
+      const Filter = genFilterRules(config.Filter)
+      result.CloudFunctionConfiguration.push({ Id, CloudFunction, Event, Filter })
+    })
+  }
+
+  return result
+}
+
+// parse XML response for bucket region
+export function parseBucketRegion(xml: string) {
+  // return region information
+  return parseXml(xml).LocationConstraint
+}
+
+export type Part = {
+  part: number
+  lastModified?: Date
+  etag: string
+}
+
+// parse XML response for list parts of an in progress multipart upload
+export function parseListParts(xml: string): { isTruncated: boolean; marker: number | undefined; parts: Part[] } {
+  let xmlobj = parseXml(xml)
+  const result: { isTruncated: boolean; marker: number | undefined; parts: Part[] } = {
+    isTruncated: false,
+    parts: [],
+    marker: undefined as number | undefined,
+  }
+  if (!xmlobj.ListPartsResult) {
+    throw new errors.InvalidXMLError('Missing tag: "ListPartsResult"')
+  }
+  xmlobj = xmlobj.ListPartsResult
+  if (xmlobj.IsTruncated) {
+    result.isTruncated = xmlobj.IsTruncated
+  }
+  if (xmlobj.NextPartNumberMarker) {
+    result.marker = toArray(xmlobj.NextPartNumberMarker)[0]
+  }
+  if (xmlobj.Part) {
+    toArray(xmlobj.Part).forEach((p) => {
+      const part = +toArray(p.PartNumber)[0]
+      const lastModified = new Date(p.LastModified)
+      const etag = p.ETag.replace(/^"/g, '')
+        .replace(/"$/g, '')
+        .replace(/^"/g, '')
+        .replace(/"$/g, '')
+        .replace(/^"/g, '')
+        .replace(/"$/g, '')
+      result.parts.push({ part, lastModified, etag })
+    })
+  }
+  return result
+}
+
+// parse XML response when a new multipart upload is initiated
+export function parseInitiateMultipart(xml: string) {
+  let xmlobj = parseXml(xml)
+
+  if (!xmlobj.InitiateMultipartUploadResult) {
+    throw new errors.InvalidXMLError('Missing tag: "InitiateMultipartUploadResult"')
+  }
+  xmlobj = xmlobj.InitiateMultipartUploadResult
+
+  if (xmlobj.UploadId) {
+    return xmlobj.UploadId
+  }
+  throw new errors.InvalidXMLError('Missing tag: "UploadId"')
+}
+
+export type MultipartResult =
+  | { errCode: string; errMessage: string }
+  | {
+      errCode?: undefined // this helps TS narrow the type
+      etag: string
+      key: string
+      bucket: string
+      location: string
+    }
+
+// parse XML response when a multipart upload is completed
+export function parseCompleteMultipart(xml: string) {
+  const xmlobj = parseXml(xml).CompleteMultipartUploadResult
+  if (xmlobj.Location) {
+    const location = toArray(xmlobj.Location)[0]
+    const bucket = toArray(xmlobj.Bucket)[0]
+    const key = xmlobj.Key
+    const etag = xmlobj.ETag.replace(/^"/g, '')
+      .replace(/"$/g, '')
+      .replace(/^"/g, '')
+      .replace(/"$/g, '')
+      .replace(/^"/g, '')
+      .replace(/"$/g, '')
+
+    return { location, bucket, key, etag }
+  }
+  // Complete Multipart can return XML Error after a 200 OK response
+  if (xmlobj.Code && xmlobj.Message) {
+    const errCode = toArray(xmlobj.Code)[0]
+    const errMessage = toArray(xmlobj.Message)[0]
+    return { errCode, errMessage }
+  }
+}
+
+type ListedObject = {
+  Key: string
+  LastModified: string
+  ETag: string
+  Size: number
+  VersionId?: string
+  IsLatest?: boolean
+}
+
+const formatObjInfo = (content: ListedObject, opts: { IsDeleteMarker?: boolean } = {}) => {
+  const { Key, LastModified, ETag, Size, VersionId, IsLatest } = content
+
+  if (!isObject(opts)) {
+    opts = {}
+  }
+
+  // @ts-expect-error index check
+  const name = sanitizeObjectKey(toArray(Key)[0])
+  // @ts-expect-error index check
+  const lastModified = new Date(toArray(LastModified)[0])
+  const etag = sanitizeETag(toArray(ETag)[0])
+  // @ts-ignore
+  const size = sanitizeSize(Size)
+
+  return {
+    name,
+    lastModified,
+    etag,
+    size,
+    versionId: VersionId,
+    isLatest: IsLatest,
+    isDeleteMarker: opts.IsDeleteMarker ? opts.IsDeleteMarker : false,
+  }
+}
+
+export type S3ListObject =
+  | { prefix: string; size: number }
+  | { name: string; size: number } // sometimes the API returns this; not sure if it's valid
+  | {
+      name: string
+      lastModified: Date
+      etag: string
+      size: number
+      isDeleteMarker?: boolean
+      isLatest?: boolean
+    }
+
+type ListObjectResponse = {
+  nextMarker?: string
+  versionIdMarker?: string
+  objects: S3ListObject[]
+  isTruncated: boolean
+  nextContinuationToken?: string
+}
+
+// parse XML response for list objects in a bucket
+export function parseListObjects(xml: string) {
+  const result: ListObjectResponse = {
+    objects: [],
+    isTruncated: false,
+  }
+  let isTruncated = false
+  let nextMarker, nextVersionKeyMarker
+  const xmlobj = parseXml(xml) as {
+    ListBucketResult?: {
+      CommonPrefixes: { Prefix: string }
+      IsTruncated: boolean
+      NextMarker?: string
+      Contents: Array<{ Key: string; LastModified: string; ETag: string; Size: number }>
+    }
+    ListVersionsResult?: {
+      CommonPrefixes: unknown
+      NextKeyMarker?: string
+      NextVersionIdMarker?: string
+      Version: Array<ListedObject>
+      DeleteMarker?: Array<ListedObject>
+      IsTruncated: boolean
+    }
+  }
+
+  const parseCommonPrefixesEntity = (responseEntity: any) => {
+    if (responseEntity) {
+      toArray(responseEntity).forEach((commonPrefix) => {
+        result.objects.push({ prefix: sanitizeObjectKey(toArray(commonPrefix.Prefix)[0]), size: 0 })
+      })
+    }
+  }
+
+  // https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjects.html
+  const listBucketResult = xmlobj.ListBucketResult
+  // https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjectVersions.html
+  const listVersionsResult = xmlobj.ListVersionsResult
+
+  if (listBucketResult) {
+    if (listBucketResult.IsTruncated) {
+      isTruncated = listBucketResult.IsTruncated
+    }
+    if (listBucketResult.Contents) {
+      toArray(listBucketResult.Contents).forEach((content) => {
+        const name = sanitizeObjectKey(content.Key)
+        const lastModified = new Date(content.LastModified)
+        const etag = sanitizeETag(content.ETag)
+        const size = content.Size
+        result.objects.push({ name, lastModified, etag, size })
+      })
+    }
+
+    if (listBucketResult.NextMarker) {
+      nextMarker = listBucketResult.NextMarker
+    }
+    parseCommonPrefixesEntity(listBucketResult.CommonPrefixes)
+  }
+
+  if (listVersionsResult) {
+    if (listVersionsResult.IsTruncated) {
+      isTruncated = listVersionsResult.IsTruncated
+    }
+
+    if (listVersionsResult.Version) {
+      toArray(listVersionsResult.Version).forEach((content) => {
+        // @ts-ignore
+        result.objects.push(formatObjInfo(content))
+      })
+    }
+    if (listVersionsResult.DeleteMarker) {
+      toArray(listVersionsResult.DeleteMarker).forEach((content) => {
+        // @ts-ignore
+        result.objects.push(formatObjInfo(content, { IsDeleteMarker: true }))
+      })
+    }
+
+    if (listVersionsResult.NextKeyMarker) {
+      nextVersionKeyMarker = listVersionsResult.NextKeyMarker
+    }
+    if (listVersionsResult.NextVersionIdMarker) {
+      result.versionIdMarker = listVersionsResult.NextVersionIdMarker
+    }
+    parseCommonPrefixesEntity(listVersionsResult.CommonPrefixes)
+  }
+
+  result.isTruncated = isTruncated
+  if (isTruncated) {
+    result.nextMarker = nextVersionKeyMarker || nextMarker
+  }
+  return result
+}
+
+// parse XML response for list objects v2 in a bucket
+export function parseListObjectsV2(xml: string) {
+  const result: {
+    objects: (
+      | { prefix: string; size: number }
+      | {
+          name: string
+          lastModified: Date
+          etag: string
+          size: number
+        }
+    )[]
+    isTruncated: boolean
+    nextContinuationToken?: string
+  } = {
+    objects: [],
+    isTruncated: false,
+  }
+
+  let xmlobj = parseXml(xml)
+  if (!xmlobj.ListBucketResult) {
+    throw new errors.InvalidXMLError('Missing tag: "ListBucketResult"')
+  }
+  xmlobj = xmlobj.ListBucketResult
+  if (xmlobj.IsTruncated) {
+    result.isTruncated = xmlobj.IsTruncated
+  }
+  if (xmlobj.NextContinuationToken) {
+    result.nextContinuationToken = xmlobj.NextContinuationToken
+  }
+  if (xmlobj.Contents) {
+    toArray(xmlobj.Contents).forEach((content) => {
+      const name = sanitizeObjectKey(toArray(content.Key)[0])
+      const lastModified = new Date(content.LastModified)
+      const etag = sanitizeETag(content.ETag)
+      const size = content.Size
+      result.objects.push({ name, lastModified, etag, size })
+    })
+  }
+  if (xmlobj.CommonPrefixes) {
+    toArray(xmlobj.CommonPrefixes).forEach((commonPrefix) => {
+      result.objects.push({ prefix: sanitizeObjectKey(toArray(commonPrefix.Prefix)[0]), size: 0 })
+    })
+  }
+  return result
+}
+
+export function parseListObjectsV2WithMetadata(xml: string) {
+  const result: {
+    objects: (
+      | { prefix: string; size: number }
+      | {
+          name: string
+          lastModified: Date
+          etag: string
+          size: number
+          metadata: ObjectMetaData | null
+        }
+    )[]
+    isTruncated: boolean
+    nextContinuationToken?: string
+  } = {
+    objects: [],
+    isTruncated: false,
+  }
+
+  let xmlobj = parseXml(xml)
+  if (!xmlobj.ListBucketResult) {
+    throw new errors.InvalidXMLError('Missing tag: "ListBucketResult"')
+  }
+  xmlobj = xmlobj.ListBucketResult
+  if (xmlobj.IsTruncated) {
+    result.isTruncated = xmlobj.IsTruncated
+  }
+  if (xmlobj.NextContinuationToken) {
+    result.nextContinuationToken = xmlobj.NextContinuationToken
+  }
+
+  if (xmlobj.Contents) {
+    toArray(xmlobj.Contents).forEach((content) => {
+      const name = sanitizeObjectKey(content.Key)
+      const lastModified = new Date(content.LastModified)
+      const etag = sanitizeETag(content.ETag)
+      const size = content.Size
+      let metadata
+      if (content.UserMetadata != null) {
+        metadata = toArray(content.UserMetadata)[0]
+      } else {
+        metadata = null
+      }
+      result.objects.push({ name, lastModified, etag, size, metadata })
+    })
+  }
+
+  if (xmlobj.CommonPrefixes) {
+    toArray(xmlobj.CommonPrefixes).forEach((commonPrefix) => {
+      result.objects.push({ prefix: sanitizeObjectKey(toArray(commonPrefix.Prefix)[0]), size: 0 })
+    })
+  }
+  return result
+}
+
+export function parseBucketVersioningConfig(xml: string) {
+  const xmlObj = parseXml(xml)
+  return xmlObj.VersioningConfiguration
+}
+
+export function parseTagging(xml: string) {
+  const xmlObj = parseXml(xml)
+  let result = []
+  if (xmlObj.Tagging && xmlObj.Tagging.TagSet && xmlObj.Tagging.TagSet.Tag) {
+    const tagResult = xmlObj.Tagging.TagSet.Tag
+    // if it is a single tag convert into an array so that the return value is always an array.
+    if (isObject(tagResult)) {
+      result.push(tagResult)
+    } else {
+      result = tagResult
+    }
+  }
+  return result
+}
+
+export function parseLifecycleConfig(xml: string) {
+  const xmlObj = parseXml(xml)
+  return xmlObj.LifecycleConfiguration
+}
+
+export type ObjectLockConfig = {
+  mode?: RETENTION_MODES
+  objectLockEnabled?: 'Enabled'
+  unit?: RETENTION_VALIDITY_UNITS
+  validity?: number
+}
+
+export function parseObjectLockConfig(xml: string): ObjectLockConfig | undefined {
+  const xmlObj = parseXml(xml)
+  let lockConfigResult: ObjectLockConfig = {}
+  if (xmlObj.ObjectLockConfiguration) {
+    lockConfigResult = {
+      objectLockEnabled: xmlObj.ObjectLockConfiguration.ObjectLockEnabled,
+    }
+    let retentionResp
+    if (
+      xmlObj.ObjectLockConfiguration &&
+      xmlObj.ObjectLockConfiguration.Rule &&
+      xmlObj.ObjectLockConfiguration.Rule.DefaultRetention
+    ) {
+      retentionResp = xmlObj.ObjectLockConfiguration.Rule.DefaultRetention || {}
+      lockConfigResult.mode = retentionResp.Mode
+    }
+    if (retentionResp) {
+      const isUnitYears = retentionResp.Years
+      if (isUnitYears) {
+        lockConfigResult.validity = isUnitYears
+        lockConfigResult.unit = RETENTION_VALIDITY_UNITS.YEARS
+      } else {
+        lockConfigResult.validity = retentionResp.Days
+        lockConfigResult.unit = RETENTION_VALIDITY_UNITS.DAYS
+      }
+    }
+    return lockConfigResult
+  }
+}
+
+export function parseObjectRetentionConfig(xml: string) {
+  const xmlObj = parseXml(xml)
+  const retentionConfig = xmlObj.Retention
+
+  return {
+    mode: retentionConfig.Mode,
+    retainUntilDate: retentionConfig.RetainUntilDate,
+  } as Retention
+}
+
+export function parseBucketEncryptionConfig(xml: string) {
+  return parseXml(xml)
+}
+
+export function parseReplicationConfig(xml: string) {
+  const xmlObj = parseXml(xml)
+
+  const replicationConfig = {
+    ReplicationConfiguration: {
+      role: xmlObj.ReplicationConfiguration.Role,
+      rules: toArray(xmlObj.ReplicationConfiguration.Rule),
+    },
+  }
+
+  return replicationConfig
+}
+
+export function parseObjectLegalHoldConfig(xml: string) {
+  const xmlObj = parseXml(xml)
+  return xmlObj.LegalHold
+}
+
+export function uploadPartParser(xml: string) {
+  const xmlObj = parseXml(xml)
+  const respEl = xmlObj.CopyPartResult
+  return respEl
+}
+
+export function removeObjectsParser(xml: string) {
+  const xmlObj = parseXml(xml)
+  if (xmlObj.DeleteResult && xmlObj.DeleteResult.Error) {
+    // return errors as array always. as the response is object in case of single object passed in removeObjects
+    return toArray(xmlObj.DeleteResult.Error)
+  }
+  return []
+}
+
+class ReadableBuffer {
+  private buf: Buffer
+
+  public readLoc: number
+
+  constructor(buf: Buffer) {
+    this.buf = buf
+    this.readLoc = 0
+  }
+
+  read(size: number): Buffer {
+    const sub = this.buf.subarray(this.readLoc, this.readLoc + size)
+    this.readLoc += size
+    return sub
+  }
+
+  notEnd(): boolean {
+    return this.readLoc < this.buf.length
+  }
+}
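+
+// For reference while reading parseSelectObjectContentResponse below: each
+// message in the select response stream is framed as (all lengths big-endian):
+//
+//   4 bytes  total message length
+//   4 bytes  header section length
+//   4 bytes  prelude CRC32 (over the two length fields above)
+//   n bytes  headers
+//   m bytes  payload, where m = total - headers - 16
+//   4 bytes  message CRC32 (over everything before it)
+//
+// The fixed 16 accounts for the two length fields and the two CRC32 words.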
+
+export function parseSelectObjectContentResponse(res: Buffer): SelectResults {
+  // extractHeaderType extracts the first half of the header message, the header type.
+  function extractHeaderType(stream: ReadableBuffer): string {
+    const headerNameLen = stream.read(1).readUInt8()
+    const headerNameWithSeparator = stream.read(headerNameLen).toString()
+
+    const [_, name] = headerNameWithSeparator.split(':')
+    return name || ''
+  }
+
+  function extractHeaderValue(stream: ReadableBuffer) {
+    const bodyLen = stream.read(2).readUInt16BE()
+    return stream.read(bodyLen).toString()
+  }
+
+  const selectResults = new SelectResults({}) // will be returned
+
+  const responseStream = new ReadableBuffer(res) // convert byte array to a readable responseStream
+  while (responseStream.notEnd()) {
+    const totalByteLengthBuffer = responseStream.read(4)
+    let msgCrcAccumulator = crc32.buf(totalByteLengthBuffer)
+
+    const headerBytesBuffer = responseStream.read(4)
+    msgCrcAccumulator = crc32.buf(headerBytesBuffer, msgCrcAccumulator)
+
+    const calculatedPreludeCrc = msgCrcAccumulator // use it to check if any CRC mismatch in header itself.
+
+    const preludeCrcBuffer = responseStream.read(4) // read 4 bytes i.e 4+4 =8 + 4 = 12 ( prelude + prelude crc)
+    msgCrcAccumulator = crc32.buf(preludeCrcBuffer, msgCrcAccumulator)
+
+    const totalMsgLength = totalByteLengthBuffer.readInt32BE()
+    const headerLength = headerBytesBuffer.readInt32BE()
+    const preludeCrcByteValue = preludeCrcBuffer.readInt32BE()
+
+    if (preludeCrcByteValue !== calculatedPreludeCrc) {
+      // Handle Header CRC mismatch Error
+      throw new Error(
+        `Header Checksum Mismatch, Prelude CRC of ${preludeCrcByteValue} does not equal expected CRC of ${calculatedPreludeCrc}`,
+      )
+    }
+
+    const headers: Record<string, string> = {}
+
+    if (headerLength > 0) {
+      const headerBytes = responseStream.read(headerLength)
+      msgCrcAccumulator = crc32.buf(headerBytes, msgCrcAccumulator)
+      const headerReaderStream = new ReadableBuffer(headerBytes)
+      while (headerReaderStream.notEnd()) {
+        const headerTypeName = extractHeaderType(headerReaderStream)
+        headerReaderStream.read(1) // just read and ignore it.
+        headers[headerTypeName] = extractHeaderValue(headerReaderStream)
+      }
+    }
+
+    let payloadStream: ReadableBuffer
+    const payLoadLength = totalMsgLength - headerLength - 16
+    if (payLoadLength > 0) {
+      const payLoadBuffer = responseStream.read(payLoadLength)
+      msgCrcAccumulator = crc32.buf(payLoadBuffer, msgCrcAccumulator)
+      // read the checksum early and detect any mismatch so we can avoid unnecessary further processing.
+      const messageCrcByteValue = responseStream.read(4).readInt32BE()
+      const calculatedCrc = msgCrcAccumulator
+      // Handle message CRC Error
+      if (messageCrcByteValue !== calculatedCrc) {
+        throw new Error(
+          `Message Checksum Mismatch, Message CRC of ${messageCrcByteValue} does not equal expected CRC of ${calculatedCrc}`,
+        )
+      }
+      payloadStream = new ReadableBuffer(payLoadBuffer)
+    }
+
+    const messageType = headers['message-type']
+
+    switch (messageType) {
+      case 'error': {
+        const errorMessage = `${headers['error-code']}:"${headers['error-message']}"`
+        throw new Error(errorMessage)
+      }
+      case 'event': {
+        const contentType = headers['content-type']
+        const eventType = headers['event-type']
+
+        switch (eventType) {
+          case 'End': {
+            selectResults.setResponse(res)
+            return selectResults
+          }
+
+          case 'Records': {
+            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+            // @ts-ignore
+            const readData = payloadStream.read(payLoadLength)
+            selectResults.setRecords(readData)
+            break
+          }
+
+          case 'Progress':
+            {
+              switch (contentType) {
+                case 'text/xml': {
+                  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+                  // @ts-ignore
+                  const progressData = payloadStream.read(payLoadLength)
+                  selectResults.setProgress(progressData.toString())
+                  break
+                }
+                default: {
+                  const errorMessage = `Unexpected content-type ${contentType} sent for event-type Progress`
+                  throw new Error(errorMessage)
+                }
+              }
+            }
+            break
+          case 'Stats':
+            {
+              switch (contentType) {
+                case 'text/xml': {
+                  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+                  // @ts-ignore
+                  const statsData = payloadStream.read(payLoadLength)
+                  selectResults.setStats(statsData.toString())
+                  break
+                }
+                default: {
+                  const errorMessage = `Unexpected content-type ${contentType} sent for event-type Stats`
+                  throw new Error(errorMessage)
+                }
+              }
+            }
+            break
+          default: {
+            // Continuation message: Not sure if it is supported. did not find a reference or any message in response.
+            // It does not have a payload.
+            const warningMessage = `Unimplemented event detected ${messageType}.`
+            // eslint-disable-next-line no-console
+            console.warn(warningMessage)
+          }
+        } // eventType End
+      } // Event End
+    } // messageType End
+  } // Top Level Stream End
+
+  throw new Error('unexpected end of stream')
+}
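ReadableBuffer above is a module-private cursor that replaces the old readableStream-based scanning. A quick standalone illustration of its read/notEnd contract (for illustration only; the class is not exported by the module):

const cursor = new ReadableBuffer(Buffer.from([0, 0, 0, 8, 0, 0, 0, 2]))
while (cursor.notEnd()) {
  // each read(4) consumes four bytes and advances readLoc
  console.log(cursor.read(4).readInt32BE()) // prints 8, then 2
}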
diff --git a/tests/functional/functional-tests.js b/tests/functional/functional-tests.js
index 03e3a853..504238d1 100644
--- a/tests/functional/functional-tests.js
+++ b/tests/functional/functional-tests.js
@@ -33,14 +33,14 @@ import * as uuid from 'uuid'
 import { AssumeRoleProvider } from '../../src/AssumeRoleProvider.ts'
 import { CopyDestinationOptions, CopySourceOptions, DEFAULT_REGION } from '../../src/helpers.ts'
 import { getVersionId } from '../../src/internal/helper.ts'
-import * as minio from '../../src/minio.js'
+import * as minio from '../../src/minio.ts'
 
 const assert = chai.assert
 
 const isWindowsPlatform = process.platform === 'win32'
 
 describe('functional tests', function () {
-  this.timeout(30 * 60 * 1000)
+  this.timeout(10 * 60 * 1000)
 
   var clientConfigParams = {}
   var region_conf_env = process.env['MINIO_REGION']
@@ -69,7 +69,7 @@ describe('functional tests', function () {
       console.error(`Error: SECRET_KEY Environment variable is not set`)
       process.exit(1)
     }
-    clientConfigParams.useSSL = enable_https_env == '1'
+    clientConfigParams.useSSL = enable_https_env === '1'
   } else {
     // If credentials aren't given, default to play.min.io.
     clientConfigParams.endPoint = 'play.min.io'
@@ -103,7 +103,9 @@ describe('functional tests', function () {
   if (trace_func_test_file_path === 'process.stdout') {
     traceStream = process.stdout
   } else {
-    traceStream = fs.createWriteStream(trace_func_test_file_path, { flags: 'a' })
+    traceStream = fs.createWriteStream(trace_func_test_file_path, {
+      flags: 'a',
+    })
   }
   traceStream.write('====================================\n')
   client.traceOn(traceStream)
@@ -134,7 +136,9 @@ describe('functional tests', function () {
  var _5mbmd5 = crypto.createHash('md5').update(_5mb).digest('hex')
 
   // create new http agent to check requests release sockets
-  var httpAgent = (clientConfigParams.useSSL ? https : http).Agent({ keepAlive: true })
+  var httpAgent = (clientConfigParams.useSSL ? https : http).Agent({
+    keepAlive: true,
+  })
   client.setRequestOptions({ agent: httpAgent })
   var metaData = {
     'Content-Type': 'text/html',
@@ -332,7 +336,7 @@ describe('functional tests', function () {
         fs.writeFileSync(tmpFileUpload, _100kb)
         client.fPutObject(bucketName, _100kbObjectName, tmpFileUpload, done)
       },
-    )
+    ).timeout(5000)
 
     step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => {
       client.statObject(bucketName, _100kbObjectName, (e, stat) => {
@@ -346,7 +350,7 @@ describe('functional tests', function () {
         }
         done()
       })
-    })
+    }).timeout(5000)
 
     var tmpFileUploadWithExt = `${tmpDir}/${_100kbObjectName}.txt`
     step(
@@ -355,7 +359,7 @@ describe('functional tests', function () {
         fs.writeFileSync(tmpFileUploadWithExt, _100kb)
         client.fPutObject(bucketName, _100kbObjectName, tmpFileUploadWithExt, metaData, done)
      },
-    )
+    ).timeout(5000)
 
     step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => {
       client.statObject(bucketName, _100kbObjectName, (e, stat) => {
@@ -371,7 +375,7 @@ describe('functional tests', function () {
         }
         done()
       })
-    })
+    }).timeout(5000)
 
     step(
       `fPutObject(bucketName, objectName, filePath, metaData, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}, filePath: ${tmpFileUploadWithExt}_`,
@@ -379,7 +383,7 @@ describe('functional tests', function () {
         fs.writeFileSync(tmpFileUploadWithExt, _100kb)
         client.fPutObject(bucketName, _100kbObjectName, tmpFileUploadWithExt, done)
       },
-    )
+    ).timeout(5000)
 
     step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => {
       client.statObject(bucketName, _100kbObjectName, (e, stat) => {
@@ -393,7 +397,7 @@ describe('functional tests', function () {
         }
         done()
       })
-    })
+    }).timeout(5000)
 
     step(
       `putObject(bucketName, objectName, stream, size, metaData, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}, stream:100kb, size:${_100kb.length}, metaData:${metaData}_`,
@@ -401,7 +405,7 @@ describe('functional tests', function () {
         var stream = readableStream(_100kb)
         client.putObject(bucketName, _100kbObjectName, stream, _100kb.length, metaData, done)
       },
-    )
+    ).timeout(5000)
 
     step(
       `putObject(bucketName, objectName, stream, size, metaData, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}, stream:100kb, size:${_100kb.length}_`,
@@ -409,7 +413,7 @@ describe('functional tests', function () {
         var stream = readableStream(_100kb)
         client.putObject(bucketName, _100kbObjectName, stream, _100kb.length, done)
       },
-    )
+    ).timeout(5000)
 
     step(
       `getObject(bucketName, objectName, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`,
@@ -429,14 +433,14 @@ describe('functional tests', function () {
           })
         })
       },
-    )
+    ).timeout(5000)
 
     step(
       `putObject(bucketName, objectName, stream, callback)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, stream:100kb_`,
       (done) => {
-        client.putObject(bucketName, _100kbObjectBufferName, _100kb, '', done)
+        client.putObject(bucketName, _100kbObjectBufferName, _100kb, done)
       },
-    )
+    ).timeout(5000)
 
     step(
       `getObject(bucketName, objectName, callback)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}_`,
@@ -456,7 +460,7 @@ describe('functional tests', function () {
         })
       })
     },
-    )
+    ).timeout(5000)
 
     step(
       `putObject(bucketName, objectName, stream, metaData)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, stream:100kb_, metaData:{}`,
@@ -466,7 +470,7 @@ describe('functional tests', function () {
         .then(() => done())
         .catch(done)
     },
-    )
+    ).timeout(5000)
 
     step(
       `getPartialObject(bucketName, objectName, offset, length, cb)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, offset:0, length=1024_`,
@@ -479,7 +483,7 @@ describe('functional tests', function () {
        })
        .catch(done)
     },
-    )
+    ).timeout(5000)
 
     step(
       `getPartialObject(bucketName, objectName, offset, length, cb)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, offset:1024, length=1024_`,
@@ -499,7 +503,7 @@ describe('functional tests', function () {
        })
        .catch(done)
     },
-    )
+    ).timeout(5000)
 
     step(
       `getPartialObject(bucketName, objectName, offset, length, cb)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, offset:1024`,
@@ -518,7 +522,7 @@ describe('functional tests', function () {
        })
        .catch(done)
     },
-    )
+    ).timeout(5000)
 
     step(
       `getObject(bucketName, objectName)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}_`,
@@ -531,22 +535,27 @@ describe('functional tests', function () {
        })
        .catch(done)
     },
-    )
+    ).timeout(5000)
 
     step(
       `putObject(bucketName, objectName, stream, metadata, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`,
-      (done) => {
-        var stream = readableStream(_65mb)
-        client.putObject(bucketName, _65mbObjectName, stream, metaData, () => {
-          setTimeout(() => {
-            if (Object.values(httpAgent.sockets).length === 0) {
-              return done()
-            }
-            done(new Error('http request did not release network socket'))
-          }, 100)
-        })
+      async () => {
+        const stream = readableStream(_65mb)
+        await client.putObject(bucketName, _65mbObjectName, stream, metaData)
+
+        for (;;) {
+          await new Promise((resolve) => {
+            setTimeout(() => {
+              resolve()
+            }, 100)
+          })
+          if (Object.values(httpAgent.sockets).length === 0) {
+            return
+          }
+        }
       },
-    )
+    ).timeout(15000)
 
     step(`getObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, (done) => {
       var hash = crypto.createHash('md5')
@@ -686,6 +695,21 @@ describe('functional tests', function () {
       },
     )
 
+    step(
+      `statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`,
+      async () => {
+        const stat = await client.statObject(bucketName, _100kbObjectName)
+        if (stat.size !== _100kb.length) {
+          throw new Error('size mismatch')
+        }
+        assert.equal(stat.metaData['content-type'], metaData['Content-Type'])
+        assert.equal(stat.metaData['Testing'], metaData['Testing'])
+        assert.equal(stat.metaData['randomstuff'], metaData['randomstuff'])
+        etag = stat.etag
+        modifiedDate = stat.modifiedDate
+      },
+    )
+
     step(
       `copyObject(bucketName, objectName, srcObject, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}, srcObject:/${bucketName}/${_100kbObjectName}_`,
       (done) => {
@@ -698,23 +722,6 @@ describe('functional tests', function () {
       },
     )
 
-    step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => {
-      client.statObject(bucketName, _100kbObjectName, (e, stat) => {
-        if (e) {
-          return done(e)
-        }
-        if (stat.size !== _100kb.length) {
-          return done(new Error('size mismatch'))
-        }
-        assert.equal(stat.metaData['content-type'], metaData['Content-Type'])
-        assert.equal(stat.metaData['Testing'], metaData['Testing'])
-        assert.equal(stat.metaData['randomstuff'], metaData['randomstuff'])
-        etag = stat.etag
-        modifiedDate = stat.modifiedDate
-        done()
-      })
-    })
-
     step(
       `copyObject(bucketName, objectName, srcObject, conditions, cb)_bucketName:${bucketName}, objectName:${_100kbObjectNameCopy}, srcObject:/${bucketName}/${_100kbObjectName}, conditions:ExceptIncorrectEtag_`,
       (done) => {
@@ -938,15 +945,12 @@ describe('functional tests', function () {
         .then(() => done())
         .catch(done)
     },
-    )
+    ).timeout(5000)
 
     step(
       `fPutObject(bucketName, objectName, filePath, metaData)_bucketName:${bucketName}, objectName:${_65mbObjectName}, filePath:${tmpFileUpload}_`,
-      (done) => {
-        client
-          .fPutObject(bucketName, _65mbObjectName, tmpFileUpload)
-          .then(() => done())
-          .catch(done)
+      async () => {
+        await client.fPutObject(bucketName, _65mbObjectName, tmpFileUpload)
       },
     )
 
@@ -958,7 +962,7 @@ describe('functional tests', function () {
         .then(() => done())
         .catch(done)
     },
-    )
+    ).timeout(5000)
 
     step(
       `removeObject(bucketName, objectName, filePath, callback)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`,
@@ -967,7 +971,7 @@ describe('functional tests', function () {
         fs.unlinkSync(tmpFileDownload)
         client.removeObject(bucketName, _65mbObjectName, done)
       },
-    )
+    ).timeout(5000)
   })
 
   describe('fGetObject-resume', () => {
    var localFile = `${tmpDir}/${_5mbObjectName}`
@@ -1907,12 +1911,12 @@ describe('functional tests', function () {
             poller.removeAllListeners('notification')
             // clean up object now
             client.removeObject(bucketName, objectName, done)
-          }, 11 * 1000)
+          }, 10 * 1000)
         })
       },
     )
-    })
-  })
+    }).timeout(120 * 1000)
+  }).timeout(120 * 1000)
 
   describe('Bucket Versioning API', () => {
     // Isolate the bucket/object for easy debugging and tracking.
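The versioning steps that follow repeatedly pair a version-aware listing with per-version removal. A compact sketch of that flow using the same client APIs exercised below (removeAllVersions is an illustrative helper, not part of the suite):

// List every version under a prefix, then delete each one by its versionId.
async function removeAllVersions(client: any, bucket: string, prefix: string) {
  const items: Array<{ name: string; versionId?: string }> = []
  await new Promise<void>((resolve, reject) => {
    client
      .listObjects(bucket, prefix, true, { IncludeVersion: true })
      .on('data', (obj: any) => items.push(obj))
      .on('error', reject)
      .on('end', resolve)
  })
  for (const item of items) {
    await client.removeObject(bucket, item.name, { versionId: item.versionId })
  }
}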
@@ -1944,7 +1948,7 @@ describe('functional tests', function () {
       })
     })
 
-    step('Suspend versioning on a bucket', (done) => {
+    step('Suspend versioning on a bucket', (done) => {
       client.setBucketVersioning(versionedBucketName, { Status: 'Suspended' }, (err) => {
         if (err && err.code === 'NotImplemented') {
           return done()
@@ -2220,91 +2224,102 @@ describe('functional tests', function () {
     step(
       `putObject(bucketName, objectName, stream, size, metaData, callback)_bucketName:${versionedBucketName}, stream:1b, size:1_Create ${listObjectsNum} objects`,
       (done) => {
-        if (isVersioningSupported) {
-          let count = 1
-          objVersionIdCounter.forEach(() => {
-            client.putObject(
-              versionedBucketName,
-              objNameWithPrefix,
-              readableStream(_1byte),
-              _1byte.length,
-              {},
-              (e, data) => {
-                objArray.push(data)
-                if (count === objVersionIdCounter.length) {
-                  done()
-                }
-                count += 1
-              },
-            )
-          })
-        } else {
+        if (!isVersioningSupported) {
           done()
+          return
         }
+
+        let count = 1
+        objVersionIdCounter.forEach(() => {
+          client.putObject(
+            versionedBucketName,
+            objNameWithPrefix,
+            readableStream(_1byte),
+            _1byte.length,
+            {},
+            (e, data) => {
+              if (e) {
+                return done(e)
+              }
+              objArray.push(data)
+              if (count === objVersionIdCounter.length) {
+                done()
+              }
+              count += 1
+            },
+          )
+        })
       },
     )
 
     step(
       `listObjects(bucketName, prefix, recursive)_bucketName:${versionedBucketName}, prefix: '', recursive:true_`,
       (done) => {
-        if (isVersioningSupported) {
-          client
-            .listObjects(versionedBucketName, '', true, { IncludeVersion: true })
-            .on('error', done)
-            .on('end', () => {
-              if (_.isEqual(objArray.length, listPrefixArray.length)) {
-                return done()
-              }
-              return done(new Error(`listObjects lists ${listPrefixArray.length} objects, expected ${listObjectsNum}`))
-            })
-            .on('data', (data) => {
-              listPrefixArray.push(data)
-            })
-        } else {
+        if (!isVersioningSupported) {
           done()
+          return
         }
+
+        client
+          .listObjects(versionedBucketName, '', true, {
+            IncludeVersion: true,
+          })
+          .on('error', done)
+          .on('end', () => {
+            if (_.isEqual(objArray.length, listPrefixArray.length)) {
+              return done()
+            }
+            return done(new Error(`listObjects lists ${listPrefixArray.length} objects, expected ${listObjectsNum}`))
+          })
+          .on('data', (data) => {
+            listPrefixArray.push(data)
+          })
       },
     )
 
     step(
       `listObjects(bucketName, prefix, recursive)_bucketName:${versionedBucketName}, prefix: ${prefixName}, recursive:true_`,
       (done) => {
-        if (isVersioningSupported) {
-          listPrefixArray = []
-          client
-            .listObjects(versionedBucketName, prefixName, true, { IncludeVersion: true })
-            .on('error', done)
-            .on('end', () => {
-              if (_.isEqual(objArray.length, listPrefixArray.length)) {
-                return done()
-              }
-              return done(new Error(`listObjects lists ${listPrefixArray.length} objects, expected ${listObjectsNum}`))
-            })
-            .on('data', (data) => {
-              listPrefixArray.push(data)
-            })
-        } else {
+        if (!isVersioningSupported) {
           done()
+          return
         }
+
+        listPrefixArray = []
+        client
+          .listObjects(versionedBucketName, prefixName, true, {
+            IncludeVersion: true,
+          })
+          .on('error', done)
+          .on('end', () => {
+            if (_.isEqual(objArray.length, listPrefixArray.length)) {
+              return done()
+            }
+            return done(new Error(`listObjects lists ${listPrefixArray.length} objects, expected ${listObjectsNum}`))
+          })
+          .on('data', (data) => {
+            listPrefixArray.push(data)
+          })
       },
     )
 
     step(
       `removeObject(bucketName, objectName, removeOpts)_bucketName:${versionedBucketName}_Remove ${listObjectsNum} objects`,
       (done) => {
-        if (isVersioningSupported) {
-          let count = 1
-          listPrefixArray.forEach((item) => {
-            client.removeObject(versionedBucketName, item.name, { versionId: item.versionId }, () => {
-              if (count === listPrefixArray.length) {
-                done()
-              }
-              count += 1
-            })
-          })
-        } else {
+        if (!isVersioningSupported) {
           done()
+          return
         }
+
+        let count = 1
+        listPrefixArray.forEach((item) => {
+          client.removeObject(versionedBucketName, item.name, { versionId: item.versionId }, () => {
+            if (count === listPrefixArray.length) {
+              done()
            }
+            count += 1
+          })
+        })
       },
     )
   })
@@ -2373,7 +2388,9 @@ describe('functional tests', function () {
       (done) => {
         if (isVersioningSupported) {
           client
-            .listObjects(versionedBucketName, '', true, { IncludeVersion: true })
+            .listObjects(versionedBucketName, '', true, {
+              IncludeVersion: true,
+            })
             .on('error', done)
             .on('end', () => {
               if (_.isEqual(2, objVersionList.length)) {
@@ -3161,7 +3178,10 @@ describe('functional tests', function () {
           client.removeObject(
             objRetentionBucket,
             retentionObjName,
-            { versionId: versionId, governanceBypass: true },
+            {
+              versionId: versionId,
+              governanceBypass: true,
+            },
             () => {
               done()
             },
@@ -3404,7 +3424,10 @@ describe('functional tests', function () {
          client.setObjectLegalHold(
            objLegalHoldBucketName,
            objLegalHoldObjName,
-            { status: 'ON', versionId: versionId },
+            {
+              status: 'ON',
+              versionId: versionId,
+            },
            () => {
              done()
            },
@@ -3435,7 +3458,10 @@ describe('functional tests', function () {
          client.setObjectLegalHold(
            objLegalHoldBucketName,
            objLegalHoldObjName,
-            { status: 'OFF', versionId: versionId },
+            {
+              status: 'OFF',
+              versionId: versionId,
+            },
            () => {
              done()
            },
@@ -3466,7 +3492,10 @@ describe('functional tests', function () {
          client.removeObject(
            objLegalHoldBucketName,
            objLegalHoldObjName,
-            { versionId: versionId, governanceBypass: true },
+            {
+              versionId: versionId,
+              governanceBypass: true,
+            },
            () => {
              done()
            },
@@ -3780,7 +3809,9 @@ describe('functional tests', function () {
         secretKey: client.secretKey,
       })
 
-      const aRoleConf = Object.assign({}, clientConfigParams, { credentialsProvider: assumeRoleProvider })
+      const aRoleConf = Object.assign({}, clientConfigParams, {
+        credentialsProvider: assumeRoleProvider,
+      })
 
       const assumeRoleClient = new minio.Client(aRoleConf)
       assumeRoleClient.region = server_region
@@ -3942,7 +3973,9 @@ describe('functional tests', function () {
       (done) => {
         if (isVersioningSupported) {
           client
-            .removeObject(bucketToTestMultipart, _100kbObjectName, { versionId: versionedObjectRes.versionId })
+            .removeObject(bucketToTestMultipart, _100kbObjectName, {
+              versionId: versionedObjectRes.versionId,
+            })
             .then(() => done())
             .catch(done)
         } else {
@@ -3984,7 +4017,9 @@ describe('functional tests', function () {
       (done) => {
         if (isVersioningSupported) {
           client
-            .removeObject(bucketToTestMultipart, _65mbObjectName, { versionId: versionedMultiPartObjectRes.versionId })
+            .removeObject(bucketToTestMultipart, _65mbObjectName, {
+              versionId: versionedMultiPartObjectRes.versionId,
+            })
             .then(() => done())
             .catch(done)
         } else {
@@ -4269,7 +4304,8 @@ describe('functional tests', function () {
       },
     )
   })
-  describe('Test listIncompleteUploads (Multipart listing) with special characters', () => {
+  describe('Test listIncompleteUploads (Multipart listing) with special characters', function () {
+    this.timeout(30 * 1000)
     const specialCharPrefix = 'SpecialMenùäöüexPrefix/'
     const objectNameSpecialChars = 'äöüex.pdf'
     const spObjWithPrefix = `${specialCharPrefix}${objectNameSpecialChars}`
@@ -4280,10 +4316,6 @@ describe('functional tests', function () {
     step(
       `initiateNewMultipartUpload(bucketName, objectName, metaData, cb)_bucketName:${spBucketName}, objectName:${spObjWithPrefix}, metaData:${metaData}`,
-      () => client.initiateNewMultipartUpload(spBucketName, spObjWithPrefix, metaData),
-    )
-
-    step(
       `listIncompleteUploads(bucketName, prefix, recursive)_bucketName:${spBucketName}, prefix:${spObjWithPrefix}, recursive: true_`,
       function (done) {
         // MinIO's ListIncompleteUploads returns an empty list, so skip this on non-AWS.
@@ -4382,13 +4414,22 @@ describe('functional tests', function () {
       `selectObjectContent(bucketName, objectName, selectOpts)_bucketName:${selObjContentBucket}, objectName:${selObject}`,
       (done) => {
         const selectOpts = {
-          expression: 'SELECT * FROM s3object s where s."Name" = \'Jane\'',
+          expression: `SELECT * FROM s3object s where s."Name" = 'Jane'`,
           expressionType: 'SQL',
           inputSerialization: {
-            CSV: { FileHeaderInfo: 'Use', RecordDelimiter: '\n', FieldDelimiter: ',' },
+            CSV: {
+              FileHeaderInfo: 'Use',
+              RecordDelimiter: '\n',
+              FieldDelimiter: ',',
+            },
             CompressionType: 'NONE',
           },
-          outputSerialization: { CSV: { RecordDelimiter: '\n', FieldDelimiter: ',' } },
+          outputSerialization: {
+            CSV: {
+              RecordDelimiter: '\n',
+              FieldDelimiter: ',',
+            },
+          },
           requestProgress: { Enabled: true },
         }
 
@@ -4588,7 +4629,9 @@ describe('functional tests', function () {
       (done) => {
         if (isVersioningSupported) {
           client
-            .listObjects(fdPrefixBucketName, '/my-prefix', true, { IncludeVersion: true })
+            .listObjects(fdPrefixBucketName, '/my-prefix', true, {
+              IncludeVersion: true,
+            })
             .on('error', done)
             .on('end', () => {
               if (_.isEqual(0, objVersionList.length)) {
diff --git a/tests/unit/test.js b/tests/unit/test.js
index 14135d9b..43741729 100644
--- a/tests/unit/test.js
+++ b/tests/unit/test.js
@@ -28,8 +28,8 @@ import {
   makeDateShort,
   partsRequired,
 } from '../../src/internal/helper.ts'
-import * as Minio from '../../src/minio.js'
-import { parseListObjects } from '../../src/xml-parsers.js'
+import * as Minio from '../../src/minio.ts'
+import { parseListObjects } from '../../src/xml-parsers.ts'
 
 const Package = { version: 'development' }