diff --git a/src/AssumeRoleProvider.js b/src/AssumeRoleProvider.ts similarity index 52% rename from src/AssumeRoleProvider.js rename to src/AssumeRoleProvider.ts index 25411f85..6d047563 100644 --- a/src/AssumeRoleProvider.js +++ b/src/AssumeRoleProvider.ts @@ -1,13 +1,53 @@ -import * as Http from 'node:http' -import * as Https from 'node:https' +import type http from 'node:http' import { URL, URLSearchParams } from 'node:url' -import { CredentialProvider } from './CredentialProvider.js' -import { Credentials } from './Credentials.js' -import { makeDateLong, parseXml, toSha256 } from './helpers.js' -import { signV4ByServiceName } from './signing.js' +import { CredentialProvider } from './CredentialProvider.ts' +import { Credentials } from './Credentials.ts' +import { makeDateLong, parseXml, toSha256 } from './helpers.ts' +import { request } from './request.ts' +import { readAsString } from './response.ts' +import { signV4ByServiceName } from './signing.ts' + +type CredentialResponse = { + ErrorResponse?: { + Error?: { + Code?: string + Message?: string + } + } + + AssumeRoleResponse?: { + AssumeRoleResult?: { + Credentials?: { + AccessKeyId: string | undefined + SecretAccessKey: string | undefined + SessionToken: string | undefined + Expiration: string | undefined + } + } + } +} export class AssumeRoleProvider extends CredentialProvider { + private stsEndpoint: string + private accessKey: string + private secretKey: string + private durationSeconds: number + private sessionToken: string + private policy: string + private region: string + private roleArn: string + private roleSessionName: string + private externalId: string + private token: string + private webIdentityToken: string + private action: string + + private _credentials: Credentials | null + private expirySeconds: number | null + private accessExpiresAt: string | null + private transportAgent?: http.Agent + constructor({ stsEndpoint, accessKey, @@ -23,6 +63,21 @@ export class AssumeRoleProvider extends CredentialProvider { webIdentityToken, action = 'AssumeRole', transportAgent = undefined, + }: { + stsEndpoint: string + accessKey: string + secretKey: string + durationSeconds: number + sessionToken: string + policy: string + region?: string + roleArn: string + roleSessionName: string + externalId: string + token: string + webIdentityToken: string + action?: string + transportAgent?: http.Agent }) { super({}) @@ -39,6 +94,7 @@ export class AssumeRoleProvider extends CredentialProvider { this.webIdentityToken = webIdentityToken this.action = action this.sessionToken = sessionToken + // By default, nodejs uses a global agent if the 'agent' property // is set to undefined. Otherwise, it's okay to assume the users // know what they're doing if they specify a custom transport agent. 
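// Usage sketch for the typed constructor above (endpoint, keys and ARN are
// placeholder values, not part of this change). As the options type is
// written, every field except `region`, `action` and `transportAgent` is
// required, so TypeScript callers must pass all of them:
//
//   const provider = new AssumeRoleProvider({
//     stsEndpoint: 'https://sts.example.com:9000',
//     accessKey: 'ACCESS-KEY',
//     secretKey: 'SECRET-KEY',
//     durationSeconds: 3600,
//     sessionToken: '',
//     policy: '',
//     roleArn: 'arn:aws:iam::123456789012:role/example',
//     roleSessionName: 'example-session',
//     externalId: '',
//     token: '',
//     webIdentityToken: '',
//   })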
@@ -47,12 +103,16 @@ export class AssumeRoleProvider extends CredentialProvider {
     /**
      * Internal Tracking variables
      */
-    this.credentials = null
+    this._credentials = null
     this.expirySeconds = null
     this.accessExpiresAt = null
   }

-  getRequestConfig() {
+  getRequestConfig(): {
+    isHttp: boolean
+    requestOptions: http.RequestOptions
+    requestData: string
+  } {
     const url = new URL(this.stsEndpoint)
     const hostValue = url.hostname
     const portValue = url.port
@@ -62,13 +122,13 @@
     qryParams.set('Version', '2011-06-15')

     const defaultExpiry = 900
-    let expirySeconds = parseInt(this.durationSeconds)
+    let expirySeconds = parseInt(this.durationSeconds as unknown as string)
     if (expirySeconds < defaultExpiry) {
       expirySeconds = defaultExpiry
     }
     this.expirySeconds = expirySeconds // for calculating refresh of credentials.

-    qryParams.set('DurationSeconds', this.expirySeconds)
+    qryParams.set('DurationSeconds', this.expirySeconds.toString())

     if (this.policy) {
       qryParams.set('Policy', this.policy)
@@ -97,9 +157,6 @@
     const date = new Date()

-    /**
-     * Nodejs's Request Configuration.
-     */
     const requestOptions = {
       hostname: hostValue,
       port: portValue,
@@ -108,16 +165,22 @@
       method: 'POST',
       headers: {
         'Content-Type': 'application/x-www-form-urlencoded',
-        'content-length': urlParams.length,
+        'content-length': urlParams.length.toString(),
         host: hostValue,
         'x-amz-date': makeDateLong(date),
         'x-amz-content-sha256': contentSha256,
-      },
+      } as Record<string, string>,
       agent: this.transportAgent,
-    }
+    } satisfies http.RequestOptions

-    const authorization = signV4ByServiceName(requestOptions, this.accessKey, this.secretKey, this.region, date, 'sts')
-    requestOptions.headers.authorization = authorization
+    requestOptions.headers.authorization = signV4ByServiceName(
+      requestOptions,
+      this.accessKey,
+      this.secretKey,
+      this.region,
+      date,
+      'sts',
+    )

     return {
       requestOptions,
@@ -126,50 +189,36 @@
     }
   }

-  async performRequest() {
+  async performRequest(): Promise<CredentialResponse> {
     const reqObj = this.getRequestConfig()
     const requestOptions = reqObj.requestOptions
     const requestData = reqObj.requestData

     const isHttp = reqObj.isHttp
-    const Transport = isHttp ? Http : Https
-
-    const promise = new Promise((resolve, reject) => {
-      const requestObj = Transport.request(requestOptions, (resp) => {
-        let resChunks = []
-        resp.on('data', (rChunk) => {
-          resChunks.push(rChunk)
-        })
-        resp.on('end', () => {
-          let body = Buffer.concat(resChunks).toString()
-          const xmlobj = parseXml(body)
-          resolve(xmlobj)
-        })
-        resp.on('error', (err) => {
-          reject(err)
-        })
-      })
-      requestObj.on('error', (e) => {
-        reject(e)
-      })
-      requestObj.write(requestData)
-      requestObj.end()
-    })
-    return promise
+
+    const res = await request(requestOptions, isHttp, requestData)
+
+    const body = await readAsString(res)
+
+    return parseXml(body)
   }

-  parseCredentials(respObj = {}) {
+  parseCredentials(respObj: CredentialResponse = {}) {
     if (respObj.ErrorResponse) {
-      throw new Error('Unable to obtain credentials:', respObj)
+      throw new Error(
+        `Unable to obtain credentials: ${respObj.ErrorResponse?.Error?.Code} ${respObj.ErrorResponse?.Error?.Message}`,
+        { cause: respObj },
+      )
     }
+
     const {
       AssumeRoleResponse: {
         AssumeRoleResult: {
           Credentials: {
-            AccessKeyId: accessKey,
-            SecretAccessKey: secretKey,
-            SessionToken: sessionToken,
-            Expiration: expiresAt,
+            AccessKeyId: accessKey = undefined,
+            SecretAccessKey: secretKey = undefined,
+            SessionToken: sessionToken = undefined,
+            Expiration: expiresAt = null,
          } = {},
        } = {},
      } = {},
@@ -184,38 +233,32 @@ export class AssumeRoleProvider extends CredentialProvider {
     })

     this.setCredentials(newCreds)
-    return this.credentials
+    return this._credentials
   }

-  async refreshCredentials() {
+  async refreshCredentials(): Promise<Credentials | null> {
     try {
       const assumeRoleCredentials = await this.performRequest()
-      this.credentials = this.parseCredentials(assumeRoleCredentials)
+      this._credentials = this.parseCredentials(assumeRoleCredentials)
     } catch (err) {
-      this.credentials = null
+      this._credentials = null
     }
-    return this.credentials
+    return this._credentials
   }

-  async getCredentials() {
-    let credConfig
-    if (!this.credentials || (this.credentials && this.isAboutToExpire())) {
+  async getCredentials(): Promise<Credentials | null> {
+    let credConfig: Credentials | null
+    if (!this._credentials || (this._credentials && this.isAboutToExpire())) {
       credConfig = await this.refreshCredentials()
     } else {
-      credConfig = this.credentials
+      credConfig = this._credentials
     }
     return credConfig
   }

   isAboutToExpire() {
-    const expiresAt = new Date(this.accessExpiresAt)
+    const expiresAt = new Date(this.accessExpiresAt!)
     const provisionalExpiry = new Date(Date.now() + 1000 * 10) // check before 10 seconds.
-    const isAboutToExpire = provisionalExpiry > expiresAt
-    return isAboutToExpire
+    return provisionalExpiry > expiresAt
   }
 }
-
-// deprecated default export, please use named exports.
-// keep for backward compatibility.
-// eslint-disable-next-line import/no-default-export
-export default AssumeRoleProvider
diff --git a/src/CredentialProvider.js b/src/CredentialProvider.ts
similarity index 51%
rename from src/CredentialProvider.js
rename to src/CredentialProvider.ts
index c1d35494..99aaebb0 100644
--- a/src/CredentialProvider.js
+++ b/src/CredentialProvider.ts
@@ -1,7 +1,17 @@
-import { Credentials } from './Credentials.js'
+import { Credentials } from './Credentials.ts'

 export class CredentialProvider {
-  constructor({ accessKey, secretKey, sessionToken }) {
+  private credentials: Credentials
+
+  constructor({
+    accessKey,
+    secretKey,
+    sessionToken,
+  }: {
+    accessKey?: string
+    secretKey?: string
+    sessionToken?: string
+  }) {
     this.credentials = new Credentials({
       accessKey,
       secretKey,
@@ -9,19 +19,20 @@
     })
   }

-  getCredentials() {
+  // eslint-disable-next-line @typescript-eslint/require-await
+  async getCredentials(): Promise<Credentials> {
     return this.credentials.get()
   }

-  setCredentials(credentials) {
+  setCredentials(credentials: Credentials) {
     if (credentials instanceof Credentials) {
       this.credentials = credentials
     } else {
-      throw new Error('Unable to set Credentials . it should be an instance of Credentials class')
+      throw new Error('Unable to set Credentials. it should be an instance of Credentials class')
     }
   }

-  setAccessKey(accessKey) {
+  setAccessKey(accessKey: string) {
     this.credentials.setAccessKey(accessKey)
   }

@@ -29,7 +40,7 @@
     return this.credentials.getAccessKey()
   }

-  setSecretKey(secretKey) {
+  setSecretKey(secretKey: string) {
     this.credentials.setSecretKey(secretKey)
   }

@@ -37,7 +48,7 @@
     return this.credentials.getSecretKey()
   }

-  setSessionToken(sessionToken) {
+  setSessionToken(sessionToken: string) {
     this.credentials.setSessionToken(sessionToken)
   }

@@ -45,8 +56,3 @@
     return this.credentials.getSessionToken()
   }
 }
-
-// deprecated default export, please use named exports.
-// keep for backward compatibility.
-// eslint-disable-next-line import/no-default-export
-export default CredentialProvider
diff --git a/src/Credentials.js b/src/Credentials.js
deleted file mode 100644
index ad99155f..00000000
--- a/src/Credentials.js
+++ /dev/null
@@ -1,39 +0,0 @@
-export class Credentials {
-  constructor({ accessKey, secretKey, sessionToken }) {
-    this.accessKey = accessKey
-    this.secretKey = secretKey
-    this.sessionToken = sessionToken
-  }
-
-  setAccessKey(accessKey) {
-    this.accessKey = accessKey
-  }
-  getAccessKey() {
-    return this.accessKey
-  }
-  setSecretKey(secretKey) {
-    this.secretKey = secretKey
-  }
-  getSecretKey() {
-    return this.secretKey
-  }
-  setSessionToken(sessionToken) {
-    this.sessionToken = sessionToken
-  }
-  getSessionToken() {
-    return this.sessionToken
-  }
-
-  get() {
-    return {
-      accessKey: this.accessKey,
-      secretKey: this.secretKey,
-      sessionToken: this.sessionToken,
-    }
-  }
-}
-
-// deprecated default export, please use named exports.
-// keep for backward compatibility.
-// eslint-disable-next-line import/no-default-export
-export default Credentials
diff --git a/src/Credentials.ts b/src/Credentials.ts
new file mode 100644
index 00000000..78e07388
--- /dev/null
+++ b/src/Credentials.ts
@@ -0,0 +1,47 @@
+export class Credentials {
+  public accessKey?: string
+  public secretKey?: string
+  public sessionToken?: string
+
+  constructor({
+    accessKey,
+    secretKey,
+    sessionToken,
+  }: {
+    accessKey?: string
+    secretKey?: string
+    sessionToken?: string
+  }) {
+    this.accessKey = accessKey
+    this.secretKey = secretKey
+    this.sessionToken = sessionToken
+  }
+
+  setAccessKey(accessKey: string) {
+    this.accessKey = accessKey
+  }
+
+  getAccessKey() {
+    return this.accessKey
+  }
+
+  setSecretKey(secretKey: string) {
+    this.secretKey = secretKey
+  }
+
+  getSecretKey() {
+    return this.secretKey
+  }
+
+  setSessionToken(sessionToken: string) {
+    this.sessionToken = sessionToken
+  }
+
+  getSessionToken() {
+    return this.sessionToken
+  }
+
+  get(): Credentials {
+    return this
+  }
+}
diff --git a/src/as-callback.ts b/src/as-callback.ts
new file mode 100644
index 00000000..a829d8b3
--- /dev/null
+++ b/src/as-callback.ts
@@ -0,0 +1,32 @@
+import { isFunction } from './helpers.ts'
+
+export function asCallback<T>(
+  cb: undefined | ((err: unknown | null, result: T) => void),
+  promise: Promise<T>,
+): Promise<T> | void {
+  if (cb === undefined) {
+    return promise
+  }
+
+  if (!isFunction(cb)) {
+    throw new TypeError(`callback should be of type "function", got ${cb}`)
+  }
+
+  promise.then(
+    (result) => {
+      cb(null, result)
+    },
+    (err) => {
+      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+      // @ts-ignore
+      cb(err)
+    },
+  )
+}
+
+export function asCallbackFn<T>(
+  cb: undefined | ((err: unknown | null, result: T) => void),
+  asyncFn: () => Promise<T>,
+): Promise<T> | void {
+  return asCallback(cb, asyncFn())
+}
diff --git a/src/async.ts b/src/async.ts
new file mode 100644
index 00000000..b3ac9cb7
--- /dev/null
+++ b/src/async.ts
@@ -0,0 +1,21 @@
+// promise helpers for the node stdlib
+
+import * as fs from 'node:fs'
+import * as stream from 'node:stream'
+import { promisify } from 'node:util'
+
+export const fsp = {
+  fstat: promisify(fs.fstat),
+  stat: promisify(fs.stat),
+  lstat: promisify(fs.lstat),
+  open: promisify(fs.open),
+  fclose: promisify(fs.close),
+  rename: fs.promises.rename,
+  readfile: promisify(fs.readFile),
+  read: promisify(fs.read),
+}
+
+export const streamPromise = {
+  // node:stream/promises Added in: v15.0.0
+  pipeline: promisify(stream.pipeline),
+}
diff --git a/src/base-error.ts b/src/base-error.ts
deleted file mode 100644
index d3947b6d..00000000
--- a/src/base-error.ts
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */ - -/// - -/** - * @internal - */ -export class ExtendableError extends Error { - constructor(message?: string, opt?: ErrorOptions) { - // error Option {cause?: unknown} is a 'nice to have', - // don't use it internally - super(message, opt) - // set error name, otherwise it's always 'Error' - this.name = this.constructor.name - } -} diff --git a/src/client.ts b/src/client.ts new file mode 100644 index 00000000..64361ef5 --- /dev/null +++ b/src/client.ts @@ -0,0 +1,2067 @@ +import * as crypto from 'node:crypto' +import * as fs from 'node:fs' +import type { IncomingMessage } from 'node:http' +import * as http from 'node:http' +import * as https from 'node:https' +import * as path from 'node:path' +import * as stream from 'node:stream' + +import async from 'async' +import BlockStream2 from 'block-stream2' +import { isBrowser } from 'browser-or-node' +import _ from 'lodash' +import { mkdirp } from 'mkdirp' +import xml2js from 'xml2js' + +import { asCallback, asCallbackFn } from './as-callback.ts' +import { fsp, streamPromise } from './async.ts' +import { CredentialProvider } from './CredentialProvider.ts' +import * as errors from './errors.ts' +import { S3Error } from './errors.ts' +import { extensions } from './extensions.ts' +import type { AnyFunction, MetaData } from './helpers.ts' +import { + DEFAULT_REGION, + extractMetadata, + getVersionId, + insertContentType, + isAmazonEndpoint, + isBoolean, + isEmpty, + isFunction, + isNumber, + isObject, + isOptionalFunction, + isReadableStream, + isString, + isValidBucketName, + isValidEndpoint, + isValidObjectName, + isValidPort, + isValidPrefix, + isVirtualHostStyle, + makeDateLong, + pipesetup, + prependXAMZMeta, + readableStream, + sanitizeETag, + toSha256, + uriEscape, + uriResourceEscape, +} from './helpers.ts' +import { qs } from './qs.ts' +import { drainResponse, readAsBuffer, readAsString } from './response.ts' +import type { Region } from './s3-endpoints.ts' +import { getS3Endpoint } from './s3-endpoints.ts' +import { signV4 } from './signing.ts' +import * as transformers from './transformers.ts' +import type { + Binary, + BucketItemFromList, + BucketItemStat, + GetObjectOpt, + IRequest, + MakeBucketOpt, + NoResultCallback, + RequestHeaders, + ResultCallback, + StatObjectOpts, + UploadedObjectInfo, +} from './type.ts' +import type { Part } from './xml-parsers.ts' +import * as xmlParsers from './xml-parsers.ts' + +const requestOptionProperties = [ + 'agent', + 'ca', + 'cert', + 'ciphers', + 'clientCertEngine', + 'crl', + 'dhparam', + 'ecdhCurve', + 'family', + 'honorCipherOrder', + 'key', + 'passphrase', + 'pfx', + 'rejectUnauthorized', + 'secureOptions', + 'secureProtocol', + 'servername', + 'sessionIdContext', +] as const + +export interface ClientOptions { + endPoint: string + accessKey: string + secretKey: string + useSSL?: boolean + port?: number + region?: Region + transport?: typeof http | typeof https + sessionToken?: string + partSize?: number + pathStyle?: boolean + credentialsProvider?: CredentialProvider + s3AccelerateEndpoint?: string + transportAgent?: http.Agent +} + +// will be replaced by rollup plugin +const version = process.env.MINIO_JS_PACKAGE_VERSION || 'development' +const Package = { version } + +export type RequestMethod = 'HEAD' | 'GET' | 'POST' | 'DELETE' | 'PUT' +export type RequestOption = Partial & { + method: RequestMethod + bucketName?: string + objectName?: string + region?: string + query?: string + pathStyle?: boolean +} + +/** + * @internal + */ +export function findCallback(args: unknown[]): [A, T | 
undefined] { + const index = args.findIndex((v) => isFunction(v)) + if (index === -1) { + return [args as A, undefined] + } + + return [args.slice(0, index) as A, args[index] as T] +} + +export class Client { + protected transport: typeof http | typeof https + protected host: string + protected port: number + protected protocol: string + protected accessKey: string + protected secretKey: string + protected sessionToken?: string + protected userAgent: string + protected anonymous: boolean + protected pathStyle: boolean + protected regionMap: Record + public region?: string + protected credentialsProvider?: CredentialProvider + partSize: number = 64 * 1024 * 1024 + protected overRidePartSize?: boolean + + protected maximumPartSize = 5 * 1024 * 1024 * 1024 + maxObjectSize = 5 * 1024 * 1024 * 1024 * 1024 + public enableSHA256: boolean + protected s3AccelerateEndpoint?: string + protected reqOptions: Record + + private readonly clientExtensions: extensions + private logStream?: stream.Writable + private transportAgent: http.Agent + + constructor(params: ClientOptions) { + // @ts-expect-error deprecated property + if (params.secure !== undefined) { + throw new Error('"secure" option deprecated, "useSSL" should be used instead') + } + // Default values if not specified. + if (params.useSSL === undefined) { + params.useSSL = true + } + if (!params.port) { + params.port = 0 + } + // Validate input params. + if (!isValidEndpoint(params.endPoint)) { + throw new errors.InvalidEndpointError(`Invalid endPoint : ${params.endPoint}`) + } + if (!isValidPort(params.port)) { + throw new errors.InvalidArgumentError(`Invalid port : ${params.port}`) + } + if (!isBoolean(params.useSSL)) { + throw new errors.InvalidArgumentError( + `Invalid useSSL flag type : ${params.useSSL}, expected to be of type "boolean"`, + ) + } + + // Validate region only if its set. + if (params.region) { + if (!isString(params.region)) { + throw new errors.InvalidArgumentError(`Invalid region : ${params.region}`) + } + } + + const host = params.endPoint.toLowerCase() + let port = params.port + let protocol: string + let transport + let transportAgent: http.Agent + // Validate if configuration is not using SSL + // for constructing relevant endpoints. + if (params.useSSL) { + // Defaults to secure. + transport = https + protocol = 'https:' + port = port || 443 + transportAgent = https.globalAgent + } else { + transport = http + protocol = 'http:' + port = port || 80 + transportAgent = http.globalAgent + } + + // if custom transport is set, use it. + if (params.transport) { + if (!isObject(params.transport)) { + throw new errors.InvalidArgumentError( + `Invalid transport type : ${params.transport}, expected to be type "object"`, + ) + } + transport = params.transport + } + + // if custom transport agent is set, use it. + if (params.transportAgent) { + if (!isObject(params.transportAgent)) { + throw new errors.InvalidArgumentError( + `Invalid transportAgent type: ${params.transportAgent}, expected to be type "object"`, + ) + } + + transportAgent = params.transportAgent + } + + // User Agent should always following the below style. + // Please open an issue to discuss any new changes here. + // + // MinIO (OS; ARCH) LIB/VER APP/VER + // + const libraryComments = `(${process.platform}; ${process.arch})` + const libraryAgent = `MinIO ${libraryComments} minio-js/${Package.version}` + // User agent block ends. 
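// How the findCallback helper defined above this class splits an overloaded
// argument list (values below are illustrative): everything before the first
// function is returned as the positional-arguments tuple, and the first
// function found -- if any -- as the node-style callback.
//
//   const [[region, opts], cb] = findCallback<[string, MakeBucketOpt], NoResultCallback>([
//     'us-east-1',
//     { ObjectLocking: true },
//     (err) => {},
//   ])
//   // region === 'us-east-1', opts === { ObjectLocking: true }, cb === the arrow function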
+ + this.transport = transport + this.transportAgent = transportAgent + this.host = host + this.port = port + this.protocol = protocol + this.accessKey = params.accessKey + this.secretKey = params.secretKey + this.sessionToken = params.sessionToken + this.userAgent = `${libraryAgent}` + + // Default path style is true + if (params.pathStyle === undefined) { + this.pathStyle = true + } else { + this.pathStyle = params.pathStyle + } + + if (!this.accessKey) { + this.accessKey = '' + } + if (!this.secretKey) { + this.secretKey = '' + } + this.anonymous = !this.accessKey || !this.secretKey + + if (params.credentialsProvider) { + this.credentialsProvider = params.credentialsProvider + void this.checkAndRefreshCreds() + } + + this.regionMap = {} + if (params.region) { + this.region = params.region + } + + if (params.partSize) { + this.partSize = params.partSize + this.overRidePartSize = true + } + if (this.partSize < 5 * 1024 * 1024) { + throw new errors.InvalidArgumentError(`Part size should be greater than 5MB`) + } + if (this.partSize > 5 * 1024 * 1024 * 1024) { + throw new errors.InvalidArgumentError(`Part size should be less than 5GB`) + } + + // SHA256 is enabled only for authenticated http requests. If the request is authenticated + // and the connection is https we use x-amz-content-sha256=UNSIGNED-PAYLOAD + // header for signature calculation. + this.enableSHA256 = !this.anonymous && !params.useSSL + + this.s3AccelerateEndpoint = params.s3AccelerateEndpoint || undefined + this.reqOptions = {} + + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + this.clientExtensions = new extensions(this) + } + + /** + * This is s3 Specific and does not hold validity in any other Object storage. + */ + private getAccelerateEndPointIfSet(bucketName: string, objectName?: string) { + if (!isEmpty(this.s3AccelerateEndpoint) && !isEmpty(bucketName) && !isEmpty(objectName)) { + // http://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration.html + // Disable transfer acceleration for non-compliant bucket names. + if (bucketName.includes('.')) { + throw new Error(`Transfer Acceleration is not supported for non compliant bucket:${bucketName}`) + } + // If transfer acceleration is requested set new host. + // For more details about enabling transfer acceleration read here. + // http://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration.html + return this.s3AccelerateEndpoint + } + return false + } + + /** + * @param endPoint - valid S3 acceleration end point + */ + public setS3TransferAccelerate(endPoint: string) { + this.s3AccelerateEndpoint = endPoint + } + + /** + * Sets the supported request options. 
+ */ + public setRequestOptions(options: Pick) { + // TODO: add options type details + if (!isObject(options)) { + throw new TypeError('request options should be of type "object"') + } + this.reqOptions = _.pick(options, requestOptionProperties) + } + + /** + * returns options object that can be used with http.request() + * Takes care of constructing virtual-host-style or path-style hostname + */ + protected getRequestOptions(opts: RequestOption): IRequest & { host: string; headers: Record } { + const method = opts.method + const region = opts.region + const bucketName = opts.bucketName + let objectName = opts.objectName + const headers = opts.headers + const query = opts.query + + let reqOptions = { + method, + headers: {} as RequestHeaders, + protocol: this.protocol, + // If custom transportAgent was supplied earlier, we'll inject it here + agent: this.transportAgent, + } + + // Verify if virtual host supported. + let virtualHostStyle + if (bucketName) { + virtualHostStyle = isVirtualHostStyle(this.host, this.protocol, bucketName, this.pathStyle) + } + + let path = '/' + let host = this.host + + let port: undefined | number + if (this.port) { + port = this.port + } + + if (objectName) { + objectName = `${uriResourceEscape(objectName)}` + } + + // For Amazon S3 endpoint, get endpoint based on region. + if (isAmazonEndpoint(host)) { + const accelerateEndPoint = this.getAccelerateEndPointIfSet(bucketName!, objectName) + if (accelerateEndPoint) { + host = `${accelerateEndPoint}` + } else { + host = getS3Endpoint(region!) + } + } + + if (virtualHostStyle && !opts.pathStyle) { + // For all hosts which support virtual host style, `bucketName` + // is part of the hostname in the following format: + // + // var host = 'bucketName.example.com' + // + if (bucketName) { + host = `${bucketName}.${host}` + } + if (objectName) { + path = `/${objectName}` + } + } else { + // For all S3 compatible storage services we will fallback to + // path style requests, where `bucketName` is part of the URI + // path. + if (bucketName) { + path = `/${bucketName}` + } + if (objectName) { + path = `/${bucketName}/${objectName}` + } + } + + if (query) { + path += `?${query}` + } + reqOptions.headers.host = host + if ((reqOptions.protocol === 'http:' && port !== 80) || (reqOptions.protocol === 'https:' && port !== 443)) { + reqOptions.headers.host = `${host}:${port}` + } + reqOptions.headers['user-agent'] = this.userAgent + if (headers) { + // have all header keys in lower case - to make signing easy + for (const [k, v] of Object.entries(headers)) { + reqOptions.headers[k.toLowerCase()] = v + } + } + + // Use any request option specified in minioClient.setRequestOptions() + reqOptions = Object.assign({}, this.reqOptions, reqOptions) + + return { + ...reqOptions, + headers: _.mapValues(reqOptions.headers, (v) => v.toString()), + host, + port, + path, + } satisfies https.RequestOptions + } + + /** + * Set application specific information. + * + * Generates User-Agent in the following style. + * + * MinIO (OS; ARCH) LIB/VER APP/VER + * + * @param appName - Application name. + * @param appVersion - Application version. 
+ */ + public setAppInfo(appName: string, appVersion: string) { + if (!isString(appName)) { + throw new TypeError(`Invalid appName: ${appName}`) + } + if (appName.trim() === '') { + throw new errors.InvalidArgumentError('Input appName cannot be empty.') + } + if (!isString(appVersion)) { + throw new TypeError(`Invalid appVersion: ${appVersion}`) + } + if (appVersion.trim() === '') { + throw new errors.InvalidArgumentError('Input appVersion cannot be empty.') + } + this.userAgent = `${this.userAgent} ${appName}/${appVersion}` + } + + /** + * Calculate part size given the object size. Part size will be at least this.partSize + * + * @param size - total size + * + * @internal + */ + public calculatePartSize(size: number) { + if (!isNumber(size)) { + throw new TypeError('size should be of type "number"') + } + if (size > this.maxObjectSize) { + throw new TypeError(`size should not be more than ${this.maxObjectSize}`) + } + if (this.overRidePartSize) { + return this.partSize + } + let partSize = this.partSize + for (;;) { + // while(true) {...} throws linting error. + // If partSize is big enough to accomodate the object size, then use it. + if (partSize * 10000 > size) { + return partSize + } + // Try part sizes as 64MB, 80MB, 96MB etc. + partSize += 16 * 1024 * 1024 + } + } + + /** + * log the request, response, error + */ + private logHTTP(reqOptions: IRequest, response: http.IncomingMessage | null, err?: unknown) { + // if no logStream available return. + if (!this.logStream) { + return + } + if (!isObject(reqOptions)) { + throw new TypeError('reqOptions should be of type "object"') + } + if (response && !isReadableStream(response)) { + throw new TypeError('response should be of type "Stream"') + } + if (err && !(err instanceof Error)) { + throw new TypeError('err should be of type "Error"') + } + const logStream = this.logStream + const logHeaders = (headers: RequestHeaders) => { + Object.entries(headers).forEach(([k, v]) => { + if (k == 'authorization') { + if (isString(v)) { + const redactor = new RegExp('Signature=([0-9a-f]+)') + v = v.replace(redactor, 'Signature=**REDACTED**') + } + } + logStream.write(`${k}: ${v}\n`) + }) + logStream.write('\n') + } + logStream.write(`REQUEST: ${reqOptions.method} ${reqOptions.path}\n`) + logHeaders(reqOptions.headers) + if (response) { + this.logStream.write(`RESPONSE: ${response.statusCode}\n`) + logHeaders(response.headers as RequestHeaders) + } + if (err) { + logStream.write('ERROR BODY:\n') + const errJSON = JSON.stringify(err, null, '\t') + logStream.write(`${errJSON}\n`) + } + } + + /** + * Enable tracing + */ + public traceOn(stream?: stream.Writable) { + if (!stream) { + stream = process.stdout + } + this.logStream = stream + } + + /** + * Disable tracing + */ + public traceOff() { + this.logStream = undefined + } + + /** + * makeRequest is the primitive used by the apis for making S3 requests. + * payload can be empty string in case of no payload. + * statusCode is the expected statusCode. If response.statusCode does not match + * we parse the XML error and call the callback with the error message. + * + * A valid region is passed by the calls - listBuckets, makeBucket and getBucketRegion. 
+ * + * @internal + */ + makeRequestAsync( + options: RequestOption, + payload: Binary | Uint8Array = '', + expectedCodes: number[] = [200], + region = '', + returnResponse = true, + ): Promise { + if (!isObject(options)) { + throw new TypeError('options should be of type "object"') + } + if (!isString(payload) && !isObject(payload)) { + // Buffer is of type 'object' + throw new TypeError('payload should be of type "string" or "Buffer"') + } + expectedCodes.forEach((statusCode) => { + if (!isNumber(statusCode)) { + throw new TypeError('statusCode should be of type "number"') + } + }) + if (!isString(region)) { + throw new TypeError('region should be of type "string"') + } + if (!isBoolean(returnResponse)) { + throw new TypeError('returnResponse should be of type "boolean"') + } + if (!options.headers) { + options.headers = {} + } + if (options.method === 'POST' || options.method === 'PUT' || options.method === 'DELETE') { + options.headers['content-length'] = payload.length.toString() + } + + const sha256sum = this.enableSHA256 ? toSha256(payload) : '' + const stream = readableStream(payload) + return this.makeRequestStreamAsync(options, stream, sha256sum, expectedCodes, region, returnResponse) + } + + /** + * new request with promise + * + * No need to drain response, response body is not valid + */ + async makeRequestAsyncOmit( + options: RequestOption, + payload: Binary | Uint8Array = '', + statusCodes: number[] = [200], + region = '', + ): Promise> { + return await this.makeRequestAsync(options, payload, statusCodes, region, false) + } + + /** + * makeRequestStream will be used directly instead of makeRequest in case the payload + * is available as a stream. for ex. putObject + * + * @internal + */ + makeRequestStreamAsync( + options: RequestOption, + stream: stream.Readable | Buffer, + sha256sum: string, + statusCodes: number[] = [200], + region = '', + returnResponse = true, + ) { + if (!isObject(options)) { + throw new TypeError('options should be of type "object"') + } + if (!(Buffer.isBuffer(stream) || isReadableStream(stream))) { + throw new errors.InvalidArgumentError('stream should be a Buffer or readable Stream') + } + if (!isString(sha256sum)) { + throw new TypeError('sha256sum should be of type "string"') + } + statusCodes.forEach((statusCode) => { + if (!isNumber(statusCode)) { + throw new TypeError('statusCode should be of type "number"') + } + }) + if (!isString(region)) { + throw new TypeError('region should be of type "string"') + } + if (!isBoolean(returnResponse)) { + throw new TypeError('returnResponse should be of type "boolean"') + } + + // sha256sum will be empty for anonymous or https requests + if (!this.enableSHA256 && sha256sum.length !== 0) { + throw new errors.InvalidArgumentError(`sha256sum expected to be empty for anonymous or https requests`) + } + // sha256sum should be valid for non-anonymous http requests. + if (this.enableSHA256 && sha256sum.length !== 64) { + throw new errors.InvalidArgumentError(`Invalid sha256sum : ${sha256sum}`) + } + + const regionPromise = region ? Promise.resolve(region) : this.getBucketRegionAsync(options.bucketName!) + + void this.checkAndRefreshCreds() + + return regionPromise.then( + (finalRegion) => + new Promise((resolve, reject) => { + options.region = finalRegion + const reqOptions = this.getRequestOptions(options) + if (!this.anonymous) { + // For non-anonymous https requests sha256sum is 'UNSIGNED-PAYLOAD' for signature calculation. 
+ if (!this.enableSHA256) { + sha256sum = 'UNSIGNED-PAYLOAD' + } + + const date = new Date() + + reqOptions.headers['x-amz-date'] = makeDateLong(date) + reqOptions.headers['x-amz-content-sha256'] = sha256sum + if (this.sessionToken) { + reqOptions.headers['x-amz-security-token'] = this.sessionToken + } + + reqOptions.headers.authorization = signV4(reqOptions, this.accessKey, this.secretKey, finalRegion, date) + } + + const req = this.transport.request(reqOptions, (response) => { + if (!response.statusCode) { + return reject(new Error("BUG: response doesn't have a statusCode")) + } + + if (!statusCodes.includes(response.statusCode)) { + // For an incorrect region, S3 server always sends back 400. + // But we will do cache invalidation for all errors so that, + // in future, if AWS S3 decides to send a different status code or + // XML error code we will still work fine. + delete this.regionMap[options.bucketName!] + // @ts-expect-error looks like `getErrorTransformer` want a `http.ServerResponse`, + // but we only have a http.IncomingMessage here + const errorTransformer = transformers.getErrorTransformer(response) + pipesetup(response, errorTransformer).on('error', (e) => { + this.logHTTP(reqOptions, response, e) + reject(e) + }) + return + } + this.logHTTP(reqOptions, response) + if (returnResponse) { + return resolve(response) + } + // We drain the socket so that the connection gets closed. Note that this + // is not expensive as the socket will not have any data. + drainResponse(response).then(() => resolve(response), reject) + }) + + req.on('error', (e) => { + this.logHTTP(reqOptions, null, e) + reject(e) + }) + + if (Buffer.isBuffer(stream)) { + req.end(stream) + } else { + pipesetup(stream, req) + } + }), + ) + } + + /// Bucket operations + + /** + * Creates the bucket `bucketName`. + * + * @param bucketName - Name of the bucket + * @param region - region, see ts types for valid values, or use empty string. + * @param makeOpts - Options to create a bucket. + * @param callback? - if no callback. will return a promise. 
+ */ + makeBucket(bucketName: string, region: Region, makeOpts: MakeBucketOpt, callback: NoResultCallback): void + makeBucket(bucketName: string, region: Region, callback: NoResultCallback): void + makeBucket(bucketName: string, callback: NoResultCallback): void + makeBucket(bucketName: string, region?: Region, makeOpts?: MakeBucketOpt): Promise + + // there is also a deprecated Backward Compatibility sign + // makeBucket(bucketName: string, makeOpts: MakeBucketOpt, callback: NoResultCallback): void + + makeBucket( + bucketName: string, + regionOrCallback?: string | NoResultCallback | MakeBucketOpt, // MakeBucketOpt as second params is deprecated + makeOptsOrCallback?: MakeBucketOpt | NoResultCallback, + callback?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) + } + + let [[region = '', makeOpts = {}], cb] = findCallback< + [string, MakeBucketOpt] | [MakeBucketOpt, string], + NoResultCallback + >([regionOrCallback, makeOptsOrCallback, callback]) + if (isObject(region)) { + // Backward Compatibility + // makeBucket(bucketName: string, makeOpts: MakeBucketOpt, callback: NoResultCallback): void + makeOpts = region + region = '' + } + + if (!isString(region)) { + throw new TypeError('region should be of type "string"') + } + if (!isObject(makeOpts)) { + throw new TypeError('makeOpts should be of type "object"') + } + + let payload = '' + // Region already set in constructor, validate if + // caller requested bucket location is same. + if (region && this.region) { + if (region !== this.region) { + throw new errors.InvalidArgumentError(`Configured region ${this.region}, requested ${region}`) + } + } + // sending makeBucket request with XML containing 'us-east-1' fails. For + // default region server expects the request without body + if (region && region !== DEFAULT_REGION) { + const builder = new xml2js.Builder({}) + + payload = builder.buildObject({ + CreateBucketConfiguration: { + $: { + xmlns: 'http://s3.amazonaws.com/doc/2006-03-01/', + }, + LocationConstraint: region, + }, + }) + } + const method = 'PUT' + const headers: RequestHeaders = {} + if (makeOpts.ObjectLocking) { + headers['x-amz-bucket-object-lock-enabled'] = true + } + if (!region) { + region = DEFAULT_REGION + } + const finalRegion = region // type narrow + const requestOpt: RequestOption = { method, bucketName, headers } + return asCallbackFn(cb, async () => { + try { + await this.makeRequestAsyncOmit(requestOpt, payload, [200], finalRegion) + } catch (err: unknown) { + if (region === '' || region === DEFAULT_REGION) { + if (err instanceof S3Error) { + const errCode = err.code + const errRegion = err.region + if (errCode === 'AuthorizationHeaderMalformed' && errRegion !== '') { + // Retry with region returned as part of error + await this.makeRequestAsyncOmit(requestOpt, payload, [200], errCode) + } + } + } + throw err + } + }) + } + + /** + * List of buckets created. 
+ */ + listBuckets(): Promise + listBuckets(callback: ResultCallback): void + listBuckets(cb?: ResultCallback): void | Promise { + const method = 'GET' + return asCallbackFn(cb, async () => { + const response = await this.makeRequestAsync({ method }, '', [200], DEFAULT_REGION) + const body = await readAsBuffer(response) + return xmlParsers.parseListBucket(body.toString()) + }) + } + + listIncompleteUploads(bucket: string, prefix: string, recursive: boolean): stream.Readable { + if (prefix === undefined) { + prefix = '' + } + if (recursive === undefined) { + recursive = false + } + if (!isValidBucketName(bucket)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucket) + } + if (!isValidPrefix(prefix)) { + throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) + } + if (!isBoolean(recursive)) { + throw new TypeError('recursive should be of type "boolean"') + } + const delimiter = recursive ? '' : '/' + let keyMarker = '' + let uploadIdMarker = '' + const uploads: unknown[] = [] + let ended = false + const readStream = new stream.Readable({ objectMode: true }) + readStream._read = () => { + // push one upload info per _read() + if (uploads.length) { + return readStream.push(uploads.shift()) + } + if (ended) { + return readStream.push(null) + } + this.listIncompleteUploadsQuery(bucket, prefix, keyMarker, uploadIdMarker, delimiter) + .on('error', (e) => readStream.emit('error', e)) + .on('data', (result) => { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + result.prefixes.forEach((prefix) => uploads.push(prefix)) + async.eachSeries( + result.uploads, + (upload, cb) => { + // for each incomplete upload add the sizes of its uploaded parts + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + this.listParts(bucket, upload.key, upload.uploadId).then( + (parts: any) => { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + upload.size = parts.reduce((acc, item) => acc + item.size, 0) + uploads.push(upload) + cb() + }, + (err: any) => cb(err), + ) + }, + (err) => { + if (err) { + readStream.emit('error', err) + return + } + if (result.isTruncated) { + keyMarker = result.nextKeyMarker + uploadIdMarker = result.nextUploadIdMarker + } else { + ended = true + } + + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + readStream._read() + }, + ) + }) + } + return readStream + } + + /** + * Remove a bucket. + * + * @param bucketName - name of the bucket + */ + bucketExists(bucketName: string, callback: ResultCallback): void + bucketExists(bucketName: string): Promise + + // * `callback(err)` _function_ : `err` is `null` if the bucket exists + bucketExists(bucketName: string, cb?: ResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + const method = 'HEAD' + + return asCallbackFn(cb, async () => { + try { + await this.makeRequestAsyncOmit({ method, bucketName }, '', [200], '') + } catch (err) { + if (err instanceof S3Error) { + if (err.code == 'NoSuchBucket' || err.code == 'NotFound') { + return false + } + } + + throw err + } + + return true + }) + } + + /** + * Remove a bucket + * + * @param bucketName - name of the bucket + * @param callback + */ + removeBucket(bucketName: string, callback: NoResultCallback): void + removeBucket(bucketName: string): Promise + + // * `callback(err)` _function_ : `err` is `null` if the bucket is removed successfully. 
+ removeBucket(bucketName: string, cb?: NoResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + + const method = 'DELETE' + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit({ method, bucketName }, '', [204], '') + delete this.regionMap[bucketName] + }) + } + + /** + * Remove the partially uploaded object. + * + * @param bucketName - name of the bucket + * @param objectName - name of the object + * @param callback - callback function is called with non `null` value in case of error + */ + removeIncompleteUpload(bucketName: string, objectName: string, callback: NoResultCallback): void + removeIncompleteUpload(bucketName: string, objectName: string): Promise + + removeIncompleteUpload(bucketName: string, objectName: string, cb?: NoResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.IsValidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + return asCallbackFn(cb, async () => { + const uploadId = await this.findUploadId(bucketName, objectName) + if (!uploadId) { + return + } + const method = 'DELETE' + const query = `uploadId=${uploadId}` + await this.makeRequestAsync( + { + method, + bucketName, + objectName, + query, + }, + '', + [204], + '', + false, + ) + }) + } + + fGetObject(bucketName: string, objectName: string, filePath: string, callback: NoResultCallback): void + fGetObject( + bucketName: string, + objectName: string, + filePath: string, + getOpts: GetObjectOpt, + callback: NoResultCallback, + ): void + /** + * Callback is called with `error` in case of error or `null` in case of success + * + * @param bucketName - name of the bucket + * @param objectName - name of the object + * @param filePath - path to which the object data will be written to + * @param getOpts? - Optional object get option + */ + fGetObject(bucketName: string, objectName: string, filePath: string, getOpts?: GetObjectOpt): Promise + + fGetObject( + bucketName: string, + objectName: string, + filePath: string, + getOptsOrCallback?: GetObjectOpt | NoResultCallback, + callback?: NoResultCallback, + ) { + // Input validation. 
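// How the body below behaves (a sketch; file names are illustrative): a call
// like `await client.fGetObject('bucket', 'a.jpg', '/tmp/a.jpg')` stages the
// download in `/tmp/a.jpg.<etag>.part.minio`. If that part file already
// exists, its size becomes the byte offset for a ranged getPartialObject
// request, so interrupted downloads resume; the part file is renamed over the
// target only after its size matches the statObject result.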
+ if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isString(filePath)) { + throw new TypeError('filePath should be of type "string"') + } + + const [[getOpts = {}], cb] = findCallback<[GetObjectOpt], NoResultCallback>([getOptsOrCallback, callback]) + + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + const executor = async (): Promise => { + let partFileStream: stream.Writable + const objStat = await this.statObject(bucketName, objectName, getOpts) + const partFile = `${filePath}.${objStat.etag}.part.minio` + + await mkdirp(path.dirname(filePath)) + + let offset = 0 + try { + const stats = await fsp.stat(partFile) + if (objStat.size === stats.size) { + return partFile + } + offset = stats.size + partFileStream = fs.createWriteStream(partFile, { flags: 'a' }) + } catch (e) { + if (e instanceof Error && (e as unknown as { code: string }).code === 'ENOENT') { + // file not exist + partFileStream = fs.createWriteStream(partFile, { flags: 'w' }) + } else { + // other error, maybe access deny + throw e + } + } + + const downloadStream = await this.getPartialObject(bucketName, objectName, offset, 0, getOpts) + + await streamPromise.pipeline(downloadStream, partFileStream) + const stats = await fsp.stat(partFile) + if (stats.size === objStat.size) { + return partFile + } + + throw new Error('Size mismatch between downloaded file and the object') + } + + return asCallback( + cb, + executor().then((partFile) => fsp.rename(partFile, filePath)), + ) + } + + getObject( + bucketName: string, + objectName: string, + getOpts: GetObjectOpt, + callback: ResultCallback, + ): void + getObject(bucketName: string, objectName: string, callback: ResultCallback): void + + /** + * Get Objects. return a readable stream of the object content by callback or promise. + * + * @param bucketName - name of the bucket + * @param objectName - name of the object + * @param getOpts + */ + getObject(bucketName: string, objectName: string, getOpts?: GetObjectOpt): Promise + + getObject( + bucketName: string, + objectName: string, + getOpts_Callback?: GetObjectOpt | ResultCallback, // getOpts + callback?: ResultCallback, // callback + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + const [[getOpts = {}], cb] = findCallback<[GetObjectOpt], ResultCallback>([ + getOpts_Callback, + callback, + ]) + + return asCallback(cb, this.getPartialObject(bucketName, objectName, 0, 0, getOpts)) + } + + /** + * Callback is called with readable stream of the partial object content. 
+ */ + getPartialObject( + bucketName: string, + objectName: string, + offset: number, + length?: number, + getOpts?: GetObjectOpt, + ): Promise + + getPartialObject( + bucketName: string, + objectName: string, + offset: number, + callback: ResultCallback, + ): void + getPartialObject( + bucketName: string, + objectName: string, + offset: number, + length: number, + callback: ResultCallback, + ): void + getPartialObject( + bucketName: string, + objectName: string, + offset: number, + length: number, + getOpts: GetObjectOpt, + callback: ResultCallback, + ): void + + getPartialObject( + bucketName: string, + objectName: string, + offset: number, + length_callback?: number | ResultCallback, // length + getOpts_callback?: GetObjectOpt | ResultCallback, // get opt + callback?: ResultCallback, // callback + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isNumber(offset)) { + throw new TypeError('offset should be of type "number"') + } + + const [[length = 0, getOpts = {}], cb] = findCallback<[number, GetObjectOpt], ResultCallback>([ + length_callback, + getOpts_callback, + callback, + ]) + + if (!isNumber(length)) { + throw new TypeError(`length should be of type "number"`) + } + + let range = '' + if (offset || length) { + if (offset) { + range = `bytes=${+offset}-` + } else { + range = 'bytes=0-' + offset = 0 + } + if (length) { + range += `${+length + offset - 1}` + } + } + + const headers: RequestHeaders = {} + if (range !== '') { + headers.range = range + } + + const expectedStatusCodes = [200] + if (range) { + expectedStatusCodes.push(206) + } + + const method = 'GET' + const query = qs(getOpts) + return asCallback( + cb, + this.makeRequestAsync({ method, bucketName, objectName, headers, query }, '', expectedStatusCodes), + ) + } + + /** + * Uploads the object. + * + * Uploading a stream + * __Arguments__ + * * `bucketName` _string_: name of the bucket + * * `objectName` _string_: name of the object + * * `stream` _Stream_: Readable stream + * * `size` _number_: size of the object (optional) + * * `callback(err, etag)` _function_: non null `err` indicates error, `etag` _string_ is the etag of the object uploaded. + * + * Uploading "Buffer" or "string" + * __Arguments__ + * * `bucketName` _string_: name of the bucket + * * `objectName` _string_: name of the object + * * `string or Buffer` _string_ or _Buffer_: string or buffer + * * `callback(err, objInfo)` _function_: `err` is `null` in case of success and `info` will have the following object details: + * * `etag` _string_: etag of the object + * * `callback(err, objInfo)` _function_: non null `err` indicates error, `objInfo` _object_ which contains versionId and etag. 
+ */ + fPutObject( + bucketName: string, + objectName: string, + filePath: string, + metaDataOrCallback?: MetaData, + maybeCallback?: NoResultCallback, + ) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + if (!isString(filePath)) { + throw new TypeError('filePath should be of type "string"') + } + + let [[metaData = {}], callback] = findCallback<[MetaData], NoResultCallback>([metaDataOrCallback, maybeCallback]) + + if (!isObject(metaData)) { + throw new TypeError('metaData should be of type "object"') + } + + // Inserts correct `content-type` attribute based on metaData and filePath + metaData = insertContentType(metaData, filePath) + + // Updates metaData to have the correct prefix if needed + metaData = prependXAMZMeta(metaData) + const apiCallback = callback + + type Part = { + part: number + etag: string + } + + const executor = async (fd: number) => { + const stats = await fsp.fstat(fd) + const fileSize = stats.size + if (fileSize > this.maxObjectSize) { + throw new Error(`${filePath} size : ${stats.size}, max allowed size: 5TB`) + } + + if (fileSize <= this.partSize) { + // simple PUT request, no multipart + const uploader = this.getUploader(bucketName, objectName, metaData, false) + const buf = await fsp.readfile(fd) + const { md5sum, sha256sum } = transformers.hashBinary(buf, this.enableSHA256) + return await uploader(buf, fileSize, sha256sum, md5sum) + } + + const previousUploadId = await this.findUploadId(bucketName, objectName) + let eTags: Part[] = [] + // if there was a previous incomplete upload, fetch all its uploaded parts info + let uploadId: string + if (previousUploadId) { + eTags = await this.listParts(bucketName, objectName, previousUploadId) + uploadId = previousUploadId + } else { + // there was no previous upload, initiate a new one + uploadId = await this.initiateNewMultipartUpload(bucketName, objectName, metaData) + } + + { + const partSize = this.calculatePartSize(fileSize) + const uploader = this.getUploader(bucketName, objectName, metaData, true) + // convert array to object to make things easy + const parts = eTags.reduce(function (acc, item) { + if (!acc[item.part]) { + acc[item.part] = item + } + return acc + }, {} as Record) + const partsDone: { part: number; etag: string }[] = [] + let partNumber = 1 + let uploadedSize = 0 + + // will be reused for hashing and uploading + // don't worry it's "unsafe", we will read data from fs to fill it + const buf = Buffer.allocUnsafe(this.partSize) + while (uploadedSize < fileSize) { + const part = parts[partNumber] + let length = partSize + if (length > fileSize - uploadedSize) { + length = fileSize - uploadedSize + } + + await fsp.read(fd, buf, 0, length, 0) + const { md5sum, sha256sum } = transformers.hashBinary(buf.subarray(0, length), this.enableSHA256) + + const md5sumHex = Buffer.from(md5sum, 'base64').toString('hex') + + if (part && md5sumHex === part.etag) { + // md5 matches, chunk already uploaded + partsDone.push({ part: partNumber, etag: part.etag }) + partNumber++ + uploadedSize += length + continue + } + + const objInfo = await uploader(uploadId, partNumber, buf.subarray(0, length), length, sha256sum, md5sum) + partsDone.push({ part: partNumber, etag: objInfo.etag }) + partNumber++ + uploadedSize += length + } + eTags = partsDone + } + + // at last, finish uploading + return 
this.completeMultipartUpload(bucketName, objectName, uploadId, eTags) + } + + const ensureFileClose = async (executor: (fd: number) => Promise) => { + let fd + try { + fd = await fsp.open(filePath, 'r') + } catch (e) { + throw new Error(`failed to open file ${filePath}: err ${e}`, { cause: e }) + } + + try { + // make sure to keep await, otherwise file will be closed early. + return await executor(fd) + } finally { + await fsp.fclose(fd) + } + } + + return asCallback(apiCallback, ensureFileClose(executor)) + } + + // ====================================================== // + // ====================================================== // + // ====================================================== // + // ====================================================== // + // ====================================================== // + // ====================================================== // + // ====================================================== // + // ====================================================== // + /* eslint-disable @typescript-eslint/ban-ts-comment */ + + completeMultipartUpload( + bucketName: string, + objectName: string, + uploadId: string, + etags: { + part: number + etag?: string + }[], + ): Promise<{ etag: string; versionId: string | null }> + + completeMultipartUpload( + bucketName: string, + objectName: string, + uploadId: string, + etags: { + part: number + etag?: string + }[], + cb: ResultCallback<{ etag: string; versionId: string | null }>, + ): void + + // this call will aggregate the parts on the server into a single object. + completeMultipartUpload( + bucketName: string, + objectName: string, + uploadId: string, + etags: { + part: number + etag?: string + }[], + cb?: ResultCallback<{ etag: string; versionId: string | null }>, + ) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isString(uploadId)) { + throw new TypeError('uploadId should be of type "string"') + } + if (!isObject(etags)) { + throw new TypeError('etags should be of type "Array"') + } + if (!isOptionalFunction(cb)) { + throw new TypeError('cb should be of type "function"') + } + + if (!uploadId) { + throw new errors.InvalidArgumentError('uploadId cannot be empty') + } + + const method = 'POST' + const query = `uploadId=${uriEscape(uploadId)}` + + const builder = new xml2js.Builder() + const payload = builder.buildObject({ + CompleteMultipartUpload: { + $: { + xmlns: 'http://s3.amazonaws.com/doc/2006-03-01/', + }, + Part: etags.map((etag) => { + return { + PartNumber: etag.part, + ETag: etag.etag, + } + }), + }, + }) + + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, objectName, query }, payload) + const body = await readAsBuffer(res) + const result = xmlParsers.parseCompleteMultipart(body.toString()) + if (!result) { + throw new Error('BUG: failed to parse server response') + } + + if (result.errCode) { + // Multipart Complete API returns an error XML after a 200 http status + throw new errors.S3Error(result.errMessage) + } + + return { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + etag: result.etag as string, + versionId: getVersionId(res.headers), + } + }) + } + + // Called by listIncompleteUploads to fetch a batch of incomplete uploads. 
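// For example (illustrative values): with prefix 'logs/', empty markers and
// the recursive=false delimiter '/', the query assembled below comes out as
//
//   uploads&delimiter=%2F&max-uploads=1000&prefix=logs%2F
//
// i.e. the sorted key=value pairs with the bare 'uploads' key forced first.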
+  // Called by listIncompleteUploads to fetch a batch of incomplete uploads.
+  listIncompleteUploadsQuery(
+    bucketName: string,
+    prefix: string,
+    keyMarker: string,
+    uploadIdMarker: string,
+    delimiter: string,
+  ): stream.Transform {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isString(prefix)) {
+      throw new TypeError('prefix should be of type "string"')
+    }
+    if (!isString(keyMarker)) {
+      throw new TypeError('keyMarker should be of type "string"')
+    }
+    if (!isString(uploadIdMarker)) {
+      throw new TypeError('uploadIdMarker should be of type "string"')
+    }
+    if (!isString(delimiter)) {
+      throw new TypeError('delimiter should be of type "string"')
+    }
+    const queries = []
+    queries.push(`prefix=${uriEscape(prefix)}`)
+    queries.push(`delimiter=${uriEscape(delimiter)}`)
+
+    if (keyMarker) {
+      keyMarker = uriEscape(keyMarker)
+      queries.push(`key-marker=${keyMarker}`)
+    }
+    if (uploadIdMarker) {
+      queries.push(`upload-id-marker=${uploadIdMarker}`)
+    }
+
+    const maxUploads = 1000
+    queries.push(`max-uploads=${maxUploads}`)
+    queries.sort()
+    queries.unshift('uploads')
+    let query = ''
+    if (queries.length > 0) {
+      query = `${queries.join('&')}`
+    }
+    const method = 'GET'
+    const transformer = transformers.getListMultipartTransformer()
+    this.makeRequestAsync({ method, bucketName, query }, '', [200], '', true).then(
+      (response) => {
+        if (!response) {
+          throw new Error('BUG: no response')
+        }
+
+        pipesetup(response, transformer)
+      },
+      (e) => {
+        return transformer.emit('error', e)
+      },
+    )
+    return transformer
+  }
+
+  public get extensions() {
+    return this.clientExtensions
+  }
+
+  public async setCredentialsProvider(credentialsProvider: CredentialProvider) {
+    if (!(credentialsProvider instanceof CredentialProvider)) {
+      throw new Error('Unable to get credentials. Expected instance of CredentialProvider')
+    }
+    this.credentialsProvider = credentialsProvider
+    await this.checkAndRefreshCreds()
+  }
+
+  private async fetchCredentials() {
+    if (this.credentialsProvider) {
+      const credentialsConf = await this.credentialsProvider.getCredentials()
+      if (credentialsConf) {
+        // @ts-expect-error accessKey maybe undefined
+        this.accessKey = credentialsConf.getAccessKey()
+        // @ts-expect-error secretKey maybe undefined
+        this.secretKey = credentialsConf.getSecretKey()
+        this.sessionToken = credentialsConf.getSessionToken()
+      } else {
+        throw new Error(
+          `Unable to get credentials. Expected instance of BaseCredentialsProvider, got ${credentialsConf}`,
+        )
+      }
+    } else {
+      throw new Error('Unable to get credentials. Expected instance of BaseCredentialsProvider')
+    }
+  }
+
+  /**
+   * Initiate a new multipart upload.
+   * @internal
+   */
+  async initiateNewMultipartUpload(bucketName: string, objectName: string, metaData: MetaData): Promise<string> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+    if (!isObject(metaData)) {
+      throw new TypeError('metaData should be of type "object"')
+    }
+    const method = 'POST'
+    const headers = Object.assign({}, metaData)
+    const query = 'uploads'
+    const res = await this.makeRequestAsync({ method, bucketName, objectName, query, headers })
+    const body = await readAsBuffer(res)
+    return xmlParsers.parseInitiateMultipart(body.toString())
+  }
+
+  // TODO: this method sometimes fails and causes an unhandled rejection error.
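Taken together, `initiateNewMultipartUpload` above and `findUploadId` below form the resume-or-start pattern that `fPutObject` and `uploadStream` rely on. A rough sketch, assuming `client` is an instance of this class (both helpers are internal):

```ts
// Resume an incomplete multipart upload if one exists, else start a new one.
const existing = await client.findUploadId('my-bucket', 'big.bin') // string | undefined
const uploadId = existing ?? (await client.initiateNewMultipartUpload('my-bucket', 'big.bin', {}))
```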
+ protected async checkAndRefreshCreds() { + if (this.credentialsProvider) { + return await this.fetchCredentials() + } + } + + /** + * gets the region of the bucket + * + * @param bucketName + * + * @internal + */ + protected async getBucketRegionAsync(bucketName: string): Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name : ${bucketName}`) + } + + const me = this + + const executor = async (): Promise => { + // Region is set with constructor, return the region right here. + if (this.region) { + return this.region + } + + const cached = this.regionMap[bucketName] + if (cached) { + return cached + } + + const extractRegionAsync = async (response: IncomingMessage) => { + const body = await readAsString(response) + const region = xmlParsers.parseBucketRegion(body) + this.regionMap[bucketName] = region + return region + } + + const method = 'GET' + const query = 'location' + + // `getBucketLocation` behaves differently in following ways for + // different environments. + // + // - For nodejs env we default to path style requests. + // - For browser env path style requests on buckets yields CORS + // error. To circumvent this problem we make a virtual host + // style request signed with 'us-east-1'. This request fails + // with an error 'AuthorizationHeaderMalformed', additionally + // the error XML also provides Region of the bucket. To validate + // this region is proper we retry the same request with the newly + // obtained region. + const pathStyle = this.pathStyle && !isBrowser + + let region: string + + try { + const res = await me.makeRequestAsync({ method, bucketName, query, pathStyle }, '', [200], DEFAULT_REGION) + return extractRegionAsync(res) + } catch (e) { + if (!(e instanceof Error && e.name === 'AuthorizationHeaderMalformed')) { + throw e + } + // @ts-expect-error we set extra properties on error object + region = e.Region as string + if (!region) { + throw e + } + } + + const res = await me.makeRequestAsync({ method, bucketName, query, pathStyle }, '', [200], region) + return extractRegionAsync(res) + } + + return executor() + } + + findUploadId(bucketName: string, objectName: string, cb: ResultCallback): void + findUploadId(bucketName: string, objectName: string): Promise + findUploadId( + bucketName: string, + objectName: string, + cb?: ResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isOptionalFunction(cb)) { + throw new TypeError('cb should be of type "function"') + } + return asCallback( + cb, + new Promise((resolve, reject) => { + let latestUpload: string | undefined + const listNext = (keyMarker: string, uploadIdMarker: string) => { + this.listIncompleteUploadsQuery(bucketName, objectName, keyMarker, uploadIdMarker, '') + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + .on('error', (e) => reject(e)) + .on('data', (result) => { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + result.uploads.forEach((upload) => { + if (upload.key === objectName) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + if (!latestUpload || upload.initiated.getTime() > latestUpload.initiated.getTime()) { + latestUpload = upload + return + } + } + }) + if (result.isTruncated) { + 
listNext(result.nextKeyMarker as string, result.nextUploadIdMarker as string) + return + } + if (latestUpload) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + return resolve(latestUpload.uploadId as string) + } + resolve(undefined) + }) + } + listNext('', '') + }), + ) + } + + // Stat information of the object. + // + // __Arguments__ + // * `bucketName` _string_: name of the bucket + // * `objectName` _string_: name of the object + // * `statOpts` _object_ : Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional). + + statObject( + bucketName: string, + objectName: string, + statOpts: StatObjectOpts, + callback: ResultCallback, + ): void + statObject(bucketName: string, objectName: string, callback: ResultCallback): void + statObject(bucketName: string, objectName: string, statOpts?: StatObjectOpts): Promise + + statObject( + bucketName: string, + objectName: string, + statOptsOrCallback: StatObjectOpts | ResultCallback = {}, + callback?: ResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + let statOpts: StatObjectOpts = {} + let cb: ResultCallback | undefined + + // backward compatibility + if (typeof statOptsOrCallback === 'function') { + // statObject(bucketName, objectName, callback): void + statOpts = {} + cb = statOptsOrCallback + } else { + // statObject(bucketName, objectName, statOpts, callback): void + statOpts = statOptsOrCallback + cb = callback + } + + if (!isObject(statOpts)) { + throw new errors.InvalidArgumentError('statOpts should be of type "object"') + } + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + const query = qs(statOpts) + const method = 'HEAD' + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, objectName, query }) + + // We drain the socket so that the connection gets closed. Note that this + // is not expensive as the socket will not have any data. + // HEAD request doesn't expect to have many response body + await drainResponse(res) + + const result: BucketItemStat = { + size: parseInt(res.headers['content-length'] as string), + metaData: extractMetadata(res.headers), + lastModified: new Date(res.headers['last-modified'] as string), + versionId: getVersionId(res.headers), + etag: sanitizeETag(res.headers.etag), + } + + return result + }) + } + + getUploader( + bucketName: string, + objectName: string, + metaData: MetaData, + multipart: false, + ): (buf: Buffer, length: number, sha256sum: string, md5sum: string) => Promise + getUploader( + bucketName: string, + objectName: string, + metaData: MetaData, + multipart: true, + ): ( + uploadId: string, + partNumber: number, + buf: Buffer, + length: number, + sha256sum: string, + md5sum: string, + ) => Promise + + // a part of the multipart. 
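Both calling conventions declared by the `statObject` overloads above, as a usage sketch; `client`, the bucket and object names, and the version id are placeholders:

```ts
// Promise style: statOpts is optional.
const stat = await client.statObject('my-bucket', 'photo.png')
console.log(stat.size, stat.etag, stat.lastModified)

// Callback style, pinning a specific version.
client.statObject('my-bucket', 'photo.png', { versionId: 'my-uuid' }, (err, stat) => {
  if (err) {
    return console.error(err)
  }
  console.log(stat?.metaData)
})
```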
+ getUploader(bucketName: string, objectName: string, metaData: MetaData, multipart: boolean) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isBoolean(multipart)) { + throw new TypeError('multipart should be of type "boolean"') + } + if (!isObject(metaData)) { + throw new TypeError('metadata should be of type "object"') + } + + const validate = (stream: stream.Readable | Buffer, length: number, sha256sum: string, md5sum: string) => { + if (!(Buffer.isBuffer(stream) || isReadableStream(stream))) { + throw new TypeError('stream should be of type "Stream" or Buffer') + } + if (!isNumber(length)) { + throw new TypeError('length should be of type "number"') + } + if (!isString(sha256sum)) { + throw new TypeError('sha256sum should be of type "string"') + } + if (!isString(md5sum)) { + throw new TypeError('md5sum should be of type "string"') + } + } + + const simpleUploader = (buf: Buffer, length: number, sha256sum: string, md5sum: string) => { + validate(buf, length, sha256sum, md5sum) + return upload('', buf, length, sha256sum, md5sum) + } + + const multipartUploader = ( + uploadId: string, + partNumber: number, + buf: Buffer, + length: number, + sha256sum: string, + md5sum: string, + ) => { + if (!isString(uploadId)) { + throw new TypeError('uploadId should be of type "string"') + } + if (!isNumber(partNumber)) { + throw new TypeError('partNumber should be of type "number"') + } + if (!uploadId) { + throw new errors.InvalidArgumentError('Empty uploadId') + } + if (!partNumber) { + throw new errors.InvalidArgumentError('partNumber cannot be 0') + } + validate(buf, length, sha256sum, md5sum) + const query = `partNumber=${partNumber}&uploadId=${uriEscape(uploadId)}` + return upload(query, buf, length, sha256sum, md5sum) + } + + const upload = async (query: string, stream: Buffer, length: number, sha256sum: string, md5sum: string) => { + const method = 'PUT' + let headers: RequestHeaders = { 'Content-Length': length } + + if (!multipart) { + headers = Object.assign({}, metaData, headers) + } + + if (!this.enableSHA256) { + headers['Content-MD5'] = md5sum + } + + const response = await this.makeRequestStreamAsync( + { + method, + bucketName, + objectName, + query, + headers, + }, + stream, + sha256sum, + [200], + '', + false, + ) + return { + etag: sanitizeETag(response.headers.etag), + versionId: getVersionId(response.headers), + } + } + if (multipart) { + return multipartUploader + } + return simpleUploader + } + + // Get part-info of all parts of an incomplete upload specified by uploadId. 
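The point of the `getUploader` overloads above is that the `multipart` literal selects the returned function's shape at compile time. A sketch, assuming `client` is an instance of this class (the factory is internal):

```ts
// multipart: true -> a part uploader keyed by uploadId and partNumber
const putPart = client.getUploader('my-bucket', 'big.bin', {}, true)
// putPart(uploadId, partNumber, buf, length, sha256sum, md5sum)

// multipart: false -> a single-shot object uploader
const putObject = client.getUploader('my-bucket', 'small.bin', {}, false)
// putObject(buf, length, sha256sum, md5sum) resolves to { etag, versionId }
```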
+ listParts(bucketName: string, objectName: string, uploadId: string): Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isString(uploadId)) { + throw new TypeError('uploadId should be of type "string"') + } + if (!uploadId) { + throw new errors.InvalidArgumentError('uploadId cannot be empty') + } + return new Promise((resolve, reject) => { + let parts: Part[] = [] + const listNext = (marker?: number) => { + this.listPartsQuery(bucketName, objectName, uploadId, marker) + .then((result) => { + parts = parts.concat(result.parts) + if (result.isTruncated) { + listNext(result.marker) + return + } + resolve(parts) + }) + .catch((e) => reject(e)) + } + listNext(0) + }) + } + + // Called by listParts to fetch a batch of part-info + async listPartsQuery(bucketName: string, objectName: string, uploadId: string, marker?: number) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + if (!isString(uploadId)) { + throw new TypeError('uploadId should be of type "string"') + } + if (!isNumber(marker)) { + throw new TypeError('marker should be of type "number"') + } + if (!uploadId) { + throw new errors.InvalidArgumentError('uploadId cannot be empty') + } + let query = '' + if (marker && marker !== 0) { + query += `part-number-marker=${marker}&` + } + query += `uploadId=${uriEscape(uploadId)}` + + const method = 'GET' + + const res = await this.makeRequestAsync({ method, bucketName, objectName, query }) + const body = await readAsBuffer(res) + return xmlParsers.parseListParts(body.toString()) + } +} + +export async function uploadStream({ + client, + bucketName, + objectName, + metaData, + stream: source, + partSize, +}: { + client: Client + bucketName: string + objectName: string + metaData: MetaData + stream: stream.Readable + partSize: number +}): Promise { + // A map of the previously uploaded chunks, for resuming a file upload. This + // will be null if we aren't resuming an upload. + const oldParts: Record = {} + + // Keep track of the etags for aggregating the chunks together later. Each + // etag represents a single chunk of the file. 
+  const eTags: Part[] = []
+
+  const previousUploadId = await client.findUploadId(bucketName, objectName)
+  let uploadId: string
+  if (!previousUploadId) {
+    uploadId = await client.initiateNewMultipartUpload(bucketName, objectName, metaData)
+  } else {
+    uploadId = previousUploadId
+    const oldTags = await client.listParts(bucketName, objectName, previousUploadId)
+    oldTags.forEach((e) => {
+      // index the previously uploaded parts into the lookup map, not back into the array
+      oldParts[e.part] = e
+    })
+  }
+
+  const chunkier = new BlockStream2({ size: partSize, zeroPadding: false })
+
+  const [_, o] = await Promise.all([
+    new Promise((resolve, reject) => {
+      source.pipe(chunkier)
+      chunkier.on('end', resolve)
+      source.on('error', reject)
+      chunkier.on('error', reject)
+    }),
+    (async () => {
+      let partNumber = 1
+
+      for await (const chunk of chunkier) {
+        const md5 = crypto.createHash('md5').update(chunk).digest()
+
+        const oldPart = oldParts[partNumber]
+        if (oldPart) {
+          if (oldPart.etag === md5.toString('hex')) {
+            eTags.push({ part: partNumber, etag: oldPart.etag })
+            partNumber++
+            continue
+          }
+        }
+
+        partNumber++
+
+        // now start to upload missing part
+        const options: RequestOption = {
+          method: 'PUT',
+          query: qs({ partNumber, uploadId }),
+          headers: {
+            'Content-Length': chunk.length,
+            'Content-MD5': md5.toString('base64'),
+          },
+          bucketName,
+          objectName,
+        }
+
+        const response = await client.makeRequestAsyncOmit(options, chunk)
+
+        let etag = response.headers.etag
+        if (etag) {
+          etag = etag.replace(/^"/, '').replace(/"$/, '')
+        } else {
+          etag = ''
+        }
+
+        eTags.push({ part: partNumber, etag })
+      }
+
+      return await client.completeMultipartUpload(bucketName, objectName, uploadId, eTags)
+    })(),
+  ])
+
+  return o
+}
diff --git a/src/copyConditions.ts b/src/copyConditions.ts
new file mode 100644
index 00000000..25d00331
--- /dev/null
+++ b/src/copyConditions.ts
@@ -0,0 +1,37 @@
+export class CopyConditions {
+  public modified: string
+  public unmodified: string
+  public matchETag: string
+  public matchETagExcept: string
+
+  constructor() {
+    this.modified = ''
+    this.unmodified = ''
+    this.matchETag = ''
+    this.matchETagExcept = ''
+  }
+
+  setModified(date: Date): void {
+    if (!(date instanceof Date)) {
+      throw new TypeError('date must be of type Date')
+    }
+
+    this.modified = date.toUTCString()
+  }
+
+  setUnmodified(date: Date): void {
+    if (!(date instanceof Date)) {
+      throw new TypeError('date must be of type Date')
+    }
+
+    this.unmodified = date.toUTCString()
+  }
+
+  setMatchETag(etag: string): void {
+    this.matchETag = etag
+  }
+
+  setMatchETagExcept(etag: string): void {
+    this.matchETagExcept = etag
+  }
+}
diff --git a/src/errors.ts b/src/errors.ts
index fa6f62fb..12c583bd 100644
--- a/src/errors.ts
+++ b/src/errors.ts
@@ -14,94 +14,74 @@
  * limitations under the License.
  */
 
-import { ExtendableError } from './base-error.ts'
-
-/**
- * AnonymousRequestError is generated for anonymous keys on specific
- * APIs. NOTE: PresignedURL generation always requires access keys.
- */
-export class AnonymousRequestError extends ExtendableError {}
-
-/**
- * InvalidArgumentError is generated for all invalid arguments.
- */
+class ExtendableError extends Error {
+  // es6 doesn't support the new error `cause` option,
+  // and the nodejs runtime adds `stack` automatically, no need to set it.
+  constructor(message?: string, opt?: ErrorOptions) {
+    super(message, opt)
+    this.name = this.constructor.name
+  }
+}
+
+// AnonymousRequestError is generated for anonymous keys on specific
+// APIs. NOTE: PresignedURL generation always requires access keys.
+export class AnonymousRequestError extends Error {} + +// InvalidArgumentError is generated for all invalid arguments. export class InvalidArgumentError extends ExtendableError {} -/** - * InvalidPortError is generated when a non integer value is provided - * for ports. - */ +// InvalidPortError is generated when a non integer value is provided +// for ports. export class InvalidPortError extends ExtendableError {} -/** - * InvalidEndpointError is generated when an invalid end point value is - * provided which does not follow domain standards. - */ +// InvalidEndpointError is generated when an invalid end point value is +// provided which does not follow domain standards. export class InvalidEndpointError extends ExtendableError {} -/** - * InvalidBucketNameError is generated when an invalid bucket name is - * provided which does not follow AWS S3 specifications. - * http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html - */ +// InvalidBucketNameError is generated when an invalid bucket name is +// provided which does not follow AWS S3 specifications. +// http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html export class InvalidBucketNameError extends ExtendableError {} -/** - * InvalidObjectNameError is generated when an invalid object name is - * provided which does not follow AWS S3 specifications. - * http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html - */ +// InvalidObjectNameError is generated when an invalid object name is +// provided which does not follow AWS S3 specifications. +// http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html export class InvalidObjectNameError extends ExtendableError {} -/** - * AccessKeyRequiredError generated by signature methods when access - * key is not found. - */ +// AccessKeyRequiredError generated by signature methods when access +// key is not found. export class AccessKeyRequiredError extends ExtendableError {} -/** - * SecretKeyRequiredError generated by signature methods when secret - * key is not found. - */ +// SecretKeyRequiredError generated by signature methods when secret +// key is not found. export class SecretKeyRequiredError extends ExtendableError {} -/** - * ExpiresParamError generated when expires parameter value is not - * well within stipulated limits. - */ +// ExpiresParamError generated when expires parameter value is not +// well within stipulated limits. export class ExpiresParamError extends ExtendableError {} -/** - * InvalidDateError generated when invalid date is found. - */ +// InvalidDateError generated when invalid date is found. export class InvalidDateError extends ExtendableError {} -/** - * InvalidPrefixError generated when object prefix provided is invalid - * or does not conform to AWS S3 object key restrictions. - */ +// InvalidPrefixError generated when object prefix provided is invalid +// or does not conform to AWS S3 object key restrictions. export class InvalidPrefixError extends ExtendableError {} -/** - * InvalidBucketPolicyError generated when the given bucket policy is invalid. - */ +// InvalidBucketPolicyError generated when the given bucket policy is invalid. export class InvalidBucketPolicyError extends ExtendableError {} -/** - * IncorrectSizeError generated when total data read mismatches with - * the input size. - */ +// IncorrectSizeError generated when total data read mismatches with +// the input size. export class IncorrectSizeError extends ExtendableError {} -/** - * InvalidXMLError generated when an unknown XML is found. 
- */ +// InvalidXMLError generated when an unknown XML is found. export class InvalidXMLError extends ExtendableError {} -/** - * S3Error is generated for errors returned from S3 server. - * see getErrorTransformer for details - */ -export class S3Error extends ExtendableError {} +// S3Error is generated for errors returned from S3 server. +// see getErrorTransformer for details +export class S3Error extends ExtendableError { + code?: string + region?: string +} export class IsValidBucketNameError extends ExtendableError {} diff --git a/src/extensions.js b/src/extensions.ts similarity index 80% rename from src/extensions.js rename to src/extensions.ts index 5e04a930..8e018ce1 100644 --- a/src/extensions.js +++ b/src/extensions.ts @@ -14,16 +14,18 @@ * limitations under the License. */ -import * as Stream from 'node:stream' +import * as stream from 'node:stream' import * as errors from './errors.ts' -import { isBoolean, isNumber, isString, isValidBucketName, isValidPrefix, pipesetup, uriEscape } from './helpers.js' -import * as transformers from './transformers.js' +import { isBoolean, isNumber, isString, isValidBucketName, isValidPrefix, pipesetup, uriEscape } from './helpers.ts' +import * as transformers from './transformers.ts' +import type { TypedClient2 } from './typed-client2.ts' + +// TODO +type S3Object = unknown export class extensions { - constructor(client) { - this.client = client - } + constructor(readonly client: TypedClient2) {} // List the objects in the bucket using S3 ListObjects V2 With Metadata // @@ -42,7 +44,7 @@ export class extensions { // * `obj.lastModified` _Date_: modified time stamp // * `obj.metadata` _object_: metadata of the object - listObjectsV2WithMetadata(bucketName, prefix, recursive, startAfter) { + listObjectsV2WithMetadata(bucketName: string, prefix: string, recursive: boolean, startAfter: string) { if (prefix === undefined) { prefix = '' } @@ -68,11 +70,11 @@ export class extensions { throw new TypeError('startAfter should be of type "string"') } // if recursive is false set delimiter to '/' - var delimiter = recursive ? '' : '/' - var continuationToken = '' - var objects = [] - var ended = false - var readStream = Stream.Readable({ objectMode: true }) + const delimiter = recursive ? '' : '/' + let continuationToken = '' + let objects: S3Object[] = [] + let ended = false + const readStream = new stream.Readable({ objectMode: true }) readStream._read = () => { // push one object per _read() if (objects.length) { @@ -92,6 +94,7 @@ export class extensions { ended = true } objects = result.objects + // @ts-expect-error read more readStream._read() }) } @@ -109,7 +112,14 @@ export class extensions { // * `max-keys` _number_: Sets the maximum number of keys returned in the response body. // * `start-after` _string_: Specifies the key to start after when listing objects in a bucket. 
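Consuming the object stream returned by `listObjectsV2WithMetadata` above, as a sketch; `client` and the names are placeholders, and the emitted fields follow the doc comment above:

```ts
const objStream = client.extensions.listObjectsV2WithMetadata('my-bucket', 'photos/', true, '')
objStream.on('data', (obj) => console.log(obj.name, obj.size, obj.metadata))
objStream.on('error', (err) => console.error(err))
objStream.on('end', () => console.log('listing complete'))
```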
- listObjectsV2WithMetadataQuery(bucketName, prefix, continuationToken, delimiter, maxKeys, startAfter) { + private listObjectsV2WithMetadataQuery( + bucketName: string, + prefix: string, + continuationToken: string, + delimiter: string, + maxKeys: number, + startAfter: string, + ) { if (!isValidBucketName(bucketName)) { throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) } @@ -128,7 +138,7 @@ export class extensions { if (!isString(startAfter)) { throw new TypeError('startAfter should be of type "string"') } - var queries = [] + const queries = [] // Call for listing objects v2 API queries.push(`list-type=2`) @@ -155,23 +165,29 @@ export class extensions { queries.push(`max-keys=${maxKeys}`) } queries.sort() - var query = '' + let query = '' if (queries.length > 0) { query = `${queries.join('&')}` } - var method = 'GET' - var transformer = transformers.getListObjectsV2WithMetadataTransformer() - this.client.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return transformer.emit('error', e) - } - pipesetup(response, transformer) - }) + const method = 'GET' + const transformer = transformers.getListObjectsV2WithMetadataTransformer() + this.client + .makeRequestAsync({ + method, + bucketName, + query, + }) + .then( + (response) => { + if (!response) { + throw new Error('BUG: callback missing response argument') + } + pipesetup(response, transformer) + }, + (e) => { + return transformer.emit('error', e) + }, + ) return transformer } } - -// deprecated default export, please use named exports. -// keep for backward compatibility. -// eslint-disable-next-line import/no-default-export -export default extensions diff --git a/src/helpers.js b/src/helpers.ts similarity index 59% rename from src/helpers.js rename to src/helpers.ts index 18091089..35dae480 100644 --- a/src/helpers.js +++ b/src/helpers.ts @@ -14,9 +14,10 @@ * limitations under the License. */ -import * as Crypto from 'node:crypto' -import * as fs from 'node:fs' -import * as path from 'node:path' +import * as crypto from 'node:crypto' +import fs from 'node:fs' +import type { IncomingHttpHeaders } from 'node:http' +import path from 'node:path' import * as stream from 'node:stream' import { isBrowser } from 'browser-or-node' @@ -24,50 +25,20 @@ import { XMLParser } from 'fast-xml-parser' import ipaddr from 'ipaddr.js' import _ from 'lodash' import mime from 'mime-types' -import querystring from 'query-string' import * as errors from './errors.ts' +import { qs } from './qs.ts' +import type { Binary, Mode } from './type.ts' -const fxp = new XMLParser() - -// Returns a wrapper function that will promisify a given callback function. -// It will preserve 'this'. -export function promisify(fn) { - return function () { - // If the last argument is a function, assume its the callback. - let callback = arguments[arguments.length - 1] - - // If the callback is given, don't promisify, just pass straight in. - if (typeof callback === 'function') { - return fn.apply(this, arguments) - } - - // Otherwise, create a new set of arguments, and wrap - // it in a promise. - let args = [...arguments] - - return new Promise((resolve, reject) => { - // Add the callback function. - args.push((err, value) => { - if (err) { - return reject(err) - } - - resolve(value) - }) - - // Call the function with our special adaptor callback added. 
- fn.apply(this, args) - }) - } -} +export type MetaData = Record +export type Header = Record // All characters in string which are NOT unreserved should be percent encoded. // Unreserved characers are : ALPHA / DIGIT / "-" / "." / "_" / "~" // Reference https://tools.ietf.org/html/rfc3986#section-2.2 -export function uriEscape(string) { - return string.split('').reduce((acc, elem) => { - let buf = Buffer.from(elem) +export function uriEscape(string: string) { + return string.split('').reduce((acc: string, elem: string) => { + const buf = Buffer.from(elem) if (buf.length === 1) { // length 1 indicates that elem is not a unicode character. // Check if it is an unreserved characer. @@ -87,23 +58,23 @@ export function uriEscape(string) { } // elem needs encoding - i.e elem should be encoded if it's not unreserved // character or if it's a unicode character. - for (var i = 0; i < buf.length; i++) { - acc = acc + '%' + buf[i].toString(16).toUpperCase() + for (const char of buf) { + acc = acc + '%' + char.toString(16).toUpperCase() } return acc }, '') } -export function uriResourceEscape(string) { +export function uriResourceEscape(string: string) { return uriEscape(string).replace(/%2F/g, '/') } -export function getScope(region, date, serviceName = 's3') { +export function getScope(region: string, date: Date, serviceName = 's3') { return `${makeDateShort(date)}/${region}/${serviceName}/aws4_request` } // isAmazonEndpoint - true if endpoint is 's3.amazonaws.com' or 's3.cn-north-1.amazonaws.com.cn' -export function isAmazonEndpoint(endpoint) { +export function isAmazonEndpoint(endpoint: string) { return endpoint === 's3.amazonaws.com' || endpoint === 's3.cn-north-1.amazonaws.com.cn' } @@ -112,24 +83,24 @@ export function isAmazonEndpoint(endpoint) { // style if the protocol is 'https:', this is due to SSL wildcard // limitation. For all other buckets and Amazon S3 endpoint we will // default to virtual host style. -export function isVirtualHostStyle(endpoint, protocol, bucket, pathStyle) { - if (protocol === 'https:' && bucket.indexOf('.') > -1) { +export function isVirtualHostStyle(endpoint: string, protocol: string, bucket: string, pathStyle: boolean) { + if (protocol === 'https:' && bucket.includes('.')) { return false } return isAmazonEndpoint(endpoint) || !pathStyle } -export function isValidIP(ip) { +export function isValidIP(ip: string) { return ipaddr.isValid(ip) } // isValidEndpoint - true if endpoint is valid domain. -export function isValidEndpoint(endpoint) { +export function isValidEndpoint(endpoint: string) { return isValidDomain(endpoint) || isValidIP(endpoint) } // isValidDomain - true if input host is a valid domain. -export function isValidDomain(host) { +export function isValidDomain(host: string) { if (!isString(host)) { return false } @@ -149,10 +120,10 @@ export function isValidDomain(host) { if (host[0] === '.') { return false } - var alphaNumerics = '`~!@#$%^&*()+={}[]|\\"\';:> -1) { + for (const char of alphaNumerics) { + if (host.includes(char)) { return false } } @@ -163,7 +134,7 @@ export function isValidDomain(host) { // Probes contentType using file extensions. // For example: probeContentType('file.png') returns 'image/png'. -export function probeContentType(path) { +export function probeContentType(path: string) { let contentType = mime.lookup(path) if (!contentType) { contentType = 'application/octet-stream' @@ -172,7 +143,7 @@ export function probeContentType(path) { } // isValidPort - is input port valid. 
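Expected behavior of `uriEscape` above, assuming the implementation shown: unreserved characters pass through, and every other byte, including each byte of a multi-byte UTF-8 character, is percent-encoded.

```ts
import { uriEscape, uriResourceEscape } from './helpers.ts'

uriEscape('photos/summer 2024.png') // => 'photos%2Fsummer%202024.png'
uriEscape('café') // => 'caf%C3%A9' (two encoded bytes for 'é')
uriResourceEscape('photos/summer 2024.png') // => 'photos/summer%202024.png' ('/' restored)
```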
-export function isValidPort(port) { +export function isValidPort(port: unknown): port is number { // verify if port is a number. if (!isNumber(port)) { return false @@ -185,13 +156,13 @@ export function isValidPort(port) { if (port === 0) { return true } - var min_port = 1 - var max_port = 65535 + const min_port = 1 + const max_port = 65535 // Verify if port is in range. return port >= min_port && port <= max_port } -export function isValidBucketName(bucket) { +export function isValidBucketName(bucket: unknown) { if (!isString(bucket)) { return false } @@ -202,7 +173,7 @@ export function isValidBucketName(bucket) { return false } // bucket with successive periods is invalid. - if (bucket.indexOf('..') > -1) { + if (bucket.includes('..')) { return false } // bucket cannot have ip address style. @@ -218,7 +189,7 @@ export function isValidBucketName(bucket) { } // check if objectName is a valid object name -export function isValidObjectName(objectName) { +export function isValidObjectName(objectName: unknown) { if (!isValidPrefix(objectName)) { return false } @@ -229,7 +200,7 @@ export function isValidObjectName(objectName) { } // check if prefix is valid -export function isValidPrefix(prefix) { +export function isValidPrefix(prefix: unknown): prefix is string { if (!isString(prefix)) { return false } @@ -240,80 +211,98 @@ export function isValidPrefix(prefix) { } // check if typeof arg number -export function isNumber(arg) { +export function isNumber(arg: unknown): arg is number { return typeof arg === 'number' } +export type AnyFunction = (...args: any[]) => any + // check if typeof arg function -export function isFunction(arg) { +export function isFunction(arg: unknown): arg is AnyFunction { + return typeof arg === 'function' +} + +// check if typeof arg function or undefined +export function isOptionalFunction(arg: unknown): arg is undefined | AnyFunction { + if (arg === undefined) { + return true + } return typeof arg === 'function' } // check if typeof arg string -export function isString(arg) { +export function isString(arg: unknown): arg is string { return typeof arg === 'string' } // check if typeof arg object -export function isObject(arg) { +export function isObject(arg: unknown): arg is object { return typeof arg === 'object' && arg !== null } // check if object is readable stream -export function isReadableStream(arg) { - return isObject(arg) && isFunction(arg._read) +export function isReadableStream(arg: unknown): arg is stream.Readable { + // eslint-disable-next-line @typescript-eslint/unbound-method + return isObject(arg) && isFunction((arg as stream.Readable)._read) } // check if arg is boolean -export function isBoolean(arg) { +export function isBoolean(arg: unknown): arg is boolean { return typeof arg === 'boolean' } // check if arg is array -export function isArray(arg) { +export function isArray(arg: unknown): arg is Array { return Array.isArray(arg) } +export function isEmpty(o: unknown): o is null | undefined { + return _.isEmpty(o) +} + +export function isEmptyObject(o: Record): boolean { + return Object.values(o).filter((x) => x !== undefined).length !== 0 +} + // check if arg is a valid date -export function isValidDate(arg) { +export function isValidDate(arg: unknown): arg is Date { + // @ts-expect-error TS(2345): Argument of type 'Date' is not assignable to param... 
Remove this comment to see the full error message return arg instanceof Date && !isNaN(arg) } // Create a Date string with format: // 'YYYYMMDDTHHmmss' + Z -export function makeDateLong(date) { +export function makeDateLong(date?: Date): string { date = date || new Date() // Gives format like: '2017-08-07T16:28:59.889Z' - date = date.toISOString() + const s = date.toISOString() - return date.slice(0, 4) + date.slice(5, 7) + date.slice(8, 13) + date.slice(14, 16) + date.slice(17, 19) + 'Z' + return s.slice(0, 4) + s.slice(5, 7) + s.slice(8, 13) + s.slice(14, 16) + s.slice(17, 19) + 'Z' } // Create a Date string with format: // 'YYYYMMDD' -export function makeDateShort(date) { +export function makeDateShort(date?: Date) { date = date || new Date() // Gives format like: '2017-08-07T16:28:59.889Z' - date = date.toISOString() + const s = date.toISOString() - return date.slice(0, 4) + date.slice(5, 7) + date.slice(8, 10) + return s.slice(0, 4) + s.slice(5, 7) + s.slice(8, 10) } // pipesetup sets up pipe() from left to right os streams array // pipesetup will also make sure that error emitted at any of the upstream Stream // will be emitted at the last stream. This makes error handling simple -export function pipesetup(...streams) { - return streams.reduce((src, dst) => { - src.on('error', (err) => dst.emit('error', err)) - return src.pipe(dst) - }) +export function pipesetup(src: stream.Readable, dst: stream.Writable) { + src.on('error', (err: unknown) => dst.emit('error', err)) + return src.pipe(dst) } // return a Readable stream that emits data -export function readableStream(data) { - var s = new stream.Readable() +export function readableStream(data: unknown): stream.Readable { + const s = new stream.Readable() s._read = () => {} s.push(data) s.push(null) @@ -321,26 +310,30 @@ export function readableStream(data) { } // Process metadata to insert appropriate value to `content-type` attribute -export function insertContentType(metaData, filePath) { +export function insertContentType(metaData: MetaData, filePath: string) { // check if content-type attribute present in metaData - for (var key in metaData) { + for (const key in metaData) { if (key.toLowerCase() === 'content-type') { return metaData } } // if `content-type` attribute is not present in metadata, // then infer it from the extension in filePath - var newMetadata = Object.assign({}, metaData) + const newMetadata = Object.assign({}, metaData) newMetadata['content-type'] = probeContentType(filePath) return newMetadata } // Function prepends metadata with the appropriate prefix if it is not already on -export function prependXAMZMeta(metaData) { - var newMetadata = Object.assign({}, metaData) - for (var key in metaData) { - if (!isAmzHeader(key) && !isSupportedHeader(key) && !isStorageclassHeader(key)) { - newMetadata['X-Amz-Meta-' + key] = newMetadata[key] +export function prependXAMZMeta(metaData?: MetaData) { + if (!metaData) { + return {} + } + + const newMetadata = Object.assign({}, metaData) + for (const [key, value] of _.entries(metaData)) { + if (!isAmzHeader(key) && !isSupportedHeader(key) && !isStorageClassHeader(key)) { + newMetadata['X-Amz-Meta-' + key] = value delete newMetadata[key] } } @@ -348,8 +341,8 @@ export function prependXAMZMeta(metaData) { } // Checks if it is a valid header according to the AmazonS3 API -export function isAmzHeader(key) { - var temp = key.toLowerCase() +export function isAmzHeader(key: string) { + const temp = key.toLowerCase() return ( temp.startsWith('x-amz-meta-') || temp === 'x-amz-acl' || 
@@ -357,9 +350,10 @@ export function isAmzHeader(key) { temp === 'x-amz-server-side-encryption' ) } + // Checks if it is a supported Header -export function isSupportedHeader(key) { - var supported_headers = [ +export function isSupportedHeader(key: string) { + const supported_headers = [ 'content-type', 'cache-control', 'content-encoding', @@ -367,20 +361,23 @@ export function isSupportedHeader(key) { 'content-language', 'x-amz-website-redirect-location', ] - return supported_headers.indexOf(key.toLowerCase()) > -1 + return supported_headers.includes(key.toLowerCase()) } + // Checks if it is a storage header -export function isStorageclassHeader(key) { +export function isStorageClassHeader(key: string) { return key.toLowerCase() === 'x-amz-storage-class' } -export function extractMetadata(metaData) { - var newMetadata = {} - for (var key in metaData) { - if (isSupportedHeader(key) || isStorageclassHeader(key) || isAmzHeader(key)) { +export function extractMetadata(metaData: IncomingHttpHeaders) { + const newMetadata = {} + for (const key in metaData) { + if (isSupportedHeader(key) || isStorageClassHeader(key) || isAmzHeader(key)) { if (key.toLowerCase().startsWith('x-amz-meta-')) { + // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... Remove this comment to see the full error message newMetadata[key.slice(11, key.length)] = metaData[key] } else { + // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... Remove this comment to see the full error message newMetadata[key] = metaData[key] } } @@ -388,68 +385,68 @@ export function extractMetadata(metaData) { return newMetadata } -export function getVersionId(headers = {}) { - const versionIdValue = headers['x-amz-version-id'] +export function getVersionId(headers: IncomingHttpHeaders = {}) { + const versionIdValue = headers['x-amz-version-id'] as string return versionIdValue || null } -export function getSourceVersionId(headers = {}) { +export function getSourceVersionId(headers: IncomingHttpHeaders = {}) { const sourceVersionId = headers['x-amz-copy-source-version-id'] return sourceVersionId || null } -export function sanitizeETag(etag = '') { - var replaceChars = { '"': '', '"': '', '"': '', '"': '', '"': '' } +export function sanitizeETag(etag = ''): string { + const replaceChars = { '"': '', '"': '', '"': '', '"': '', '"': '' } + // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... Remove this comment to see the full error message return etag.replace(/^("|"|")|("|"|")$/g, (m) => replaceChars[m]) } export const RETENTION_MODES = { GOVERNANCE: 'GOVERNANCE', COMPLIANCE: 'COMPLIANCE', -} +} as const export const RETENTION_VALIDITY_UNITS = { DAYS: 'Days', YEARS: 'Years', -} +} as const export const LEGAL_HOLD_STATUS = { ENABLED: 'ON', DISABLED: 'OFF', -} +} as const -const objectToBuffer = (payload) => { - const payloadBuf = Buffer.from(Buffer.from(payload)) - return payloadBuf +function objectToBuffer(payload: Binary | Uint8Array): Buffer { + // don't know how to write this... + return Buffer.from(payload) } -export const toMd5 = (payload) => { - let payLoadBuf = objectToBuffer(payload) +export function toMd5(payload: Binary | Uint8Array): string { + let payLoadBuf: Binary = objectToBuffer(payload) // use string from browser and buffer from nodejs // browser support is tested only against minio server payLoadBuf = isBrowser ? 
payLoadBuf.toString() : payLoadBuf - return Crypto.createHash('md5').update(payLoadBuf).digest().toString('base64') + return crypto.createHash('md5').update(payLoadBuf).digest().toString('base64') } -export const toSha256 = (payload) => { - return Crypto.createHash('sha256').update(payload).digest('hex') +export function toSha256(payload: Binary | Uint8Array): string { + return crypto.createHash('sha256').update(payload).digest('hex') } // toArray returns a single element array with param being the element, // if param is just a string, and returns 'param' back if it is an array // So, it makes sure param is always an array -export const toArray = (param) => { +export function toArray(param: T | T[]): Array { if (!Array.isArray(param)) { - return [param] + return [param] as T[] } return param } -export const sanitizeObjectKey = (objectName) => { +export function sanitizeObjectKey(objectName: string): string { // + symbol characters are not decoded as spaces in JS. so replace them first and decode to get the correct result. - let asStrName = (objectName ? objectName.toString() : '').replace(/\+/g, ' ') - const sanitizedName = decodeURIComponent(asStrName) - return sanitizedName + const asStrName = (objectName ? objectName.toString() : '').replace(/\+/g, ' ') + return decodeURIComponent(asStrName) } export const PART_CONSTRAINTS = { @@ -483,23 +480,27 @@ const ENCRYPTION_HEADERS = { sseGenericHeader: GENERIC_SSE_HEADER, // sseKmsKeyID is the AWS SSE-KMS key id. sseKmsKeyID: GENERIC_SSE_HEADER + '-Aws-Kms-Key-Id', -} +} as const /** * Return Encryption headers * @param encConfig * @returns an object with key value pairs that can be used in headers. */ -function getEncryptionHeaders(encConfig) { +function getEncryptionHeaders(encConfig: Encryption): Record { const encType = encConfig.type const encHeaders = {} - if (!_.isEmpty(encType)) { + if (!isEmpty(encType)) { if (encType === ENCRYPTION_TYPES.SSEC) { return { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore [encHeaders[ENCRYPTION_HEADERS.sseGenericHeader]]: 'AES256', } } else if (encType === ENCRYPTION_TYPES.KMS) { return { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore [ENCRYPTION_HEADERS.sseGenericHeader]: encConfig.SSEAlgorithm, [ENCRYPTION_HEADERS.sseKmsKeyID]: encConfig.KMSMasterKeyID, } @@ -510,16 +511,28 @@ function getEncryptionHeaders(encConfig) { } export class CopySourceOptions { + public readonly Bucket: string + public readonly Object: string + public readonly VersionID: string + public MatchETag: string + private readonly NoMatchETag: string + private readonly MatchModifiedSince: string | null + private readonly MatchUnmodifiedSince: string | null + public readonly MatchRange: boolean + public readonly Start: number + public readonly End: number + private readonly Encryption?: Encryption + /** * - * @param Bucket __string__ Bucket Name - * @param Object __string__ Object Name - * @param VersionID __string__ Valid versionId - * @param MatchETag __string__ Etag to match - * @param NoMatchETag __string__ Etag to exclude - * @param MatchModifiedSince __string__ Modified Date of the object/part. 
UTC Date in string format - * @param MatchUnmodifiedSince __string__ Modified Date of the object/part to exclude UTC Date in string format - * @param MatchRange __boolean__ true or false Object range to match + * @param Bucket - Bucket Name + * @param Object - Object Name + * @param VersionID - Valid versionId + * @param MatchETag - Etag to match + * @param NoMatchETag - Etag to exclude + * @param MatchModifiedSince - Modified Date of the object/part. UTC Date in string format + * @param MatchUnmodifiedSince - Modified Date of the object/part to exclude UTC Date in string format + * @param MatchRange - true or false Object range to match * @param Start * @param End * @param Encryption @@ -535,7 +548,19 @@ export class CopySourceOptions { MatchRange = false, Start = 0, End = 0, - Encryption = {}, + Encryption = undefined, + }: { + Bucket?: string + Object?: string + VersionID?: string + MatchETag?: string + NoMatchETag?: string + MatchModifiedSince?: string | null + MatchUnmodifiedSince?: string | null + MatchRange?: boolean + Start?: number + End?: number + Encryption?: Encryption } = {}) { this.Bucket = Bucket this.Object = Object @@ -569,24 +594,24 @@ export class CopySourceOptions { } getHeaders() { - let headerOptions = {} + const headerOptions: Header = {} headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) - if (!_.isEmpty(this.VersionID)) { + if (!isEmpty(this.VersionID)) { headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object) + '?versionId=' + this.VersionID } - if (!_.isEmpty(this.MatchETag)) { + if (!isEmpty(this.MatchETag)) { headerOptions['x-amz-copy-source-if-match'] = this.MatchETag } - if (!_.isEmpty(this.NoMatchETag)) { + if (!isEmpty(this.NoMatchETag)) { headerOptions['x-amz-copy-source-if-none-match'] = this.NoMatchETag } - if (!_.isEmpty(this.MatchModifiedSince)) { + if (!isEmpty(this.MatchModifiedSince)) { headerOptions['x-amz-copy-source-if-modified-since'] = this.MatchModifiedSince } - if (!_.isEmpty(this.MatchUnmodifiedSince)) { + if (!isEmpty(this.MatchUnmodifiedSince)) { headerOptions['x-amz-copy-source-if-unmodified-since'] = this.MatchUnmodifiedSince } @@ -594,30 +619,54 @@ export class CopySourceOptions { } } +export type Encryption = { + type: string + SSEAlgorithm?: string + KMSMasterKeyID?: string +} + export class CopyDestinationOptions { - /* - * @param Bucket __string__ - * @param Object __string__ Object Name for the destination (composed/copied) object defaults - * @param Encryption __object__ Encryption configuration defaults to {} - * @param UserMetadata __object__ - * @param UserTags __object__ | __string__ - * @param LegalHold __string__ ON | OFF - * @param RetainUntilDate __string__ UTC Date String + public readonly Bucket: string + public readonly Object: string + private readonly Encryption?: Encryption + private readonly UserMetadata?: MetaData + private readonly UserTags?: Record | string + private readonly LegalHold?: 'on' | 'off' + private readonly RetainUntilDate?: string + private readonly Mode?: Mode + + /** + * @param Bucket - Bucket name + * @param Object - Object Name for the destination (composed/copied) object defaults + * @param Encryption - Encryption configuration defaults to {} + * @param UserMetadata - + * @param UserTags + * @param LegalHold - + * @param RetainUntilDate - UTC Date String * @param Mode */ constructor({ - Bucket = '', - Object = '', - Encryption = null, - UserMetadata = null, - UserTags = null, - LegalHold = null, - RetainUntilDate = null, - Mode = null, // + 
Bucket, + Object, + Encryption, + UserMetadata, + UserTags, + LegalHold, + RetainUntilDate, + Mode, + }: { + Bucket: string + Object: string + Encryption?: Encryption + UserMetadata?: MetaData + UserTags?: Record | string + LegalHold?: 'on' | 'off' + RetainUntilDate?: string + Mode?: Mode }) { this.Bucket = Bucket this.Object = Object - this.Encryption = Encryption + this.Encryption = Encryption ?? undefined // null input will become undefined, easy for runtime assert this.UserMetadata = UserMetadata this.UserTags = UserTags this.LegalHold = LegalHold @@ -625,47 +674,43 @@ export class CopyDestinationOptions { this.RetainUntilDate = RetainUntilDate } - getHeaders() { + getHeaders(): Record { const replaceDirective = 'REPLACE' - const headerOptions = {} + const headerOptions: Record = {} const userTags = this.UserTags - if (!_.isEmpty(userTags)) { + if (!isEmpty(userTags)) { headerOptions['X-Amz-Tagging-Directive'] = replaceDirective - headerOptions['X-Amz-Tagging'] = isObject(userTags) - ? querystring.stringify(userTags) - : isString(userTags) - ? userTags - : '' + headerOptions['X-Amz-Tagging'] = isObject(userTags) ? qs(userTags) : isString(userTags) ? userTags : '' } - if (!_.isEmpty(this.Mode)) { + if (this.Mode) { headerOptions['X-Amz-Object-Lock-Mode'] = this.Mode // GOVERNANCE or COMPLIANCE } - if (!_.isEmpty(this.RetainUntilDate)) { + if (this.RetainUntilDate) { headerOptions['X-Amz-Object-Lock-Retain-Until-Date'] = this.RetainUntilDate // needs to be UTC. } - if (!_.isEmpty(this.LegalHold)) { + if (this.LegalHold) { headerOptions['X-Amz-Object-Lock-Legal-Hold'] = this.LegalHold // ON or OFF } - if (!_.isEmpty(this.UserMetadata)) { - const headerKeys = Object.keys(this.UserMetadata) - headerKeys.forEach((key) => { - headerOptions[`X-Amz-Meta-${key}`] = this.UserMetadata[key] - }) + if (this.UserMetadata) { + for (const [key, value] of Object.entries(this.UserMetadata)) { + headerOptions[`X-Amz-Meta-${key}`] = value.toString() + } } - if (!_.isEmpty(this.Encryption)) { + if (this.Encryption) { const encryptionHeaders = getEncryptionHeaders(this.Encryption) - Object.keys(encryptionHeaders).forEach((key) => { - headerOptions[key] = encryptionHeaders[key] - }) + for (const [key, value] of Object.entries(encryptionHeaders)) { + headerOptions[key] = value + } } return headerOptions } + validate() { if (!isValidBucketName(this.Bucket)) { throw new errors.InvalidBucketNameError('Invalid Destination bucket name: ' + this.Bucket) @@ -673,25 +718,25 @@ export class CopyDestinationOptions { if (!isValidObjectName(this.Object)) { throw new errors.InvalidObjectNameError(`Invalid Destination object name: ${this.Object}`) } - if (!_.isEmpty(this.UserMetadata) && !isObject(this.UserMetadata)) { + if (!isEmpty(this.UserMetadata) && !isObject(this.UserMetadata)) { throw new errors.InvalidObjectNameError(`Destination UserMetadata should be an object with key value pairs`) } - if (!_.isEmpty(this.Mode) && ![RETENTION_MODES.GOVERNANCE, RETENTION_MODES.COMPLIANCE].includes(this.Mode)) { + if (!isEmpty(this.Mode) && ![RETENTION_MODES.GOVERNANCE, RETENTION_MODES.COMPLIANCE].includes(this.Mode)) { throw new errors.InvalidObjectNameError( `Invalid Mode specified for destination object it should be one of [GOVERNANCE,COMPLIANCE]`, ) } - if (!_.isEmpty(this.Encryption) && _.isEmpty(this.Encryption)) { + if (this.Encryption !== undefined && isEmptyObject(this.Encryption)) { throw new errors.InvalidObjectNameError(`Invalid Encryption configuration for destination object `) } return true } } -export const 
partsRequired = (size) => { - let maxPartSize = PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE / (PART_CONSTRAINTS.MAX_PARTS_COUNT - 1) +export function partsRequired(size: number): number { + const maxPartSize = PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE / (PART_CONSTRAINTS.MAX_PARTS_COUNT - 1) let requiredPartSize = size / maxPartSize if (size % maxPartSize > 0) { requiredPartSize++ @@ -704,19 +749,16 @@ export const partsRequired = (size) => { // start and end index slices. Splits happen evenly to be sure that no // part is less than 5MiB, as that could fail the multipart request if // it is not the last part. - -let startIndexParts = [] -let endIndexParts = [] -export function calculateEvenSplits(size, objInfo) { +export function calculateEvenSplits(size: number, objInfo: { Start?: unknown; Bucket: string; Object: string }) { if (size === 0) { return null } const reqParts = partsRequired(size) - startIndexParts = new Array(reqParts) - endIndexParts = new Array(reqParts) + const startIndexParts = new Array(reqParts) + const endIndexParts = new Array(reqParts) - let start = objInfo.Start - if (_.isEmpty(objInfo.Start) || start === -1) { + let start = objInfo.Start as number + if (isEmpty(objInfo.Start) || start === -1) { start = 0 } const divisorValue = Math.trunc(size / reqParts) @@ -732,7 +774,7 @@ export function calculateEvenSplits(size, objInfo) { } const currentStart = nextStart - let currentEnd = currentStart + curPartSize - 1 + const currentEnd = currentStart + curPartSize - 1 nextStart = currentEnd + 1 startIndexParts[i] = currentStart @@ -742,31 +784,31 @@ export function calculateEvenSplits(size, objInfo) { return { startIndex: startIndexParts, endIndex: endIndexParts, objInfo: objInfo } } -export function removeDirAndFiles(dirPath, removeSelf) { - if (removeSelf === undefined) { - removeSelf = true - } +export function removeDirAndFiles(dirPath: string, removeSelf = true) { + let files try { - var files = fs.readdirSync(dirPath) + files = fs.readdirSync(dirPath) } catch (e) { return } - if (files.length > 0) { - for (var i = 0; i < files.length; i++) { - var filePath = path.join(dirPath, files[i]) - if (fs.statSync(filePath).isFile()) { - fs.unlinkSync(filePath) - } else { - removeDirAndFiles(filePath) - } + + for (const item of files) { + const filePath = path.join(dirPath, item) + if (fs.statSync(filePath).isFile()) { + fs.unlinkSync(filePath) + } else { + removeDirAndFiles(filePath, true) } } + if (removeSelf) { fs.rmdirSync(dirPath) } } -export const parseXml = (xml) => { +const fxp = new XMLParser() + +export function parseXml(xml: string): any { let result = null result = fxp.parse(xml) if (result.Error) { @@ -776,12 +818,23 @@ export const parseXml = (xml) => { return result } +// maybe this should be a generic type for Records, leave it for later refactor export class SelectResults { + private records?: unknown + private response?: unknown + private stats?: string + private progress?: unknown + constructor({ records, // parsed data as stream response, // original response stream stats, // stats as xml progress, // stats as xml + }: { + records?: unknown + response?: unknown + stats?: string + progress?: unknown }) { this.records = records this.response = response @@ -789,32 +842,35 @@ export class SelectResults { this.progress = progress } - setStats(stats) { + setStats(stats: string) { this.stats = stats } + getStats() { return this.stats } - setProgress(progress) { + setProgress(progress: unknown) { this.progress = progress } + getProgress() { return 
this.progress } - setResponse(response) { + setResponse(response: unknown) { this.response = response } + getResponse() { return this.response } - setRecords(records) { + setRecords(records: unknown) { this.records = records } - getRecords() { + getRecords(): unknown { return this.records } } diff --git a/src/minio.js b/src/minio.js deleted file mode 100644 index 2567e254..00000000 --- a/src/minio.js +++ /dev/null @@ -1,3987 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import * as fs from 'node:fs' -import * as Http from 'node:http' -import * as Https from 'node:https' -import * as path from 'node:path' -import * as Stream from 'node:stream' - -import async from 'async' -import BlockStream2 from 'block-stream2' -import _ from 'lodash' -import mkdirp from 'mkdirp' -import * as querystring from 'query-string' -import { TextEncoder } from 'web-encoding' -import Xml from 'xml' -import xml2js from 'xml2js' - -import { CredentialProvider } from './CredentialProvider.js' -import * as errors from './errors.ts' -import { extensions } from './extensions.js' -import { - calculateEvenSplits, - CopyDestinationOptions, - CopySourceOptions, - DEFAULT_REGION, - extractMetadata, - getScope, - getSourceVersionId, - getVersionId, - insertContentType, - isAmazonEndpoint, - isArray, - isBoolean, - isFunction, - isNumber, - isObject, - isReadableStream, - isString, - isValidBucketName, - isValidDate, - isValidEndpoint, - isValidObjectName, - isValidPort, - isValidPrefix, - isVirtualHostStyle, - LEGAL_HOLD_STATUS, - makeDateLong, - PART_CONSTRAINTS, - partsRequired, - pipesetup, - prependXAMZMeta, - promisify, - readableStream, - RETENTION_MODES, - RETENTION_VALIDITY_UNITS, - sanitizeETag, - toMd5, - toSha256, - uriEscape, - uriResourceEscape, -} from './helpers.js' -import { NotificationConfig, NotificationPoller } from './notification.js' -import { ObjectUploader } from './object-uploader.js' -import { getS3Endpoint } from './s3-endpoints.js' -import { postPresignSignatureV4, presignSignatureV4, signV4 } from './signing.js' -import * as transformers from './transformers.js' -import { parseSelectObjectContentResponse } from './xml-parsers.js' - -// will be replaced by bundler -const Package = { version: process.env.MINIO_JS_PACKAGE_VERSION || 'development' } - -export * from './helpers.js' -export * from './notification.js' - -export class Client { - constructor(params) { - if (typeof params.secure !== 'undefined') { - throw new Error('"secure" option deprecated, "useSSL" should be used instead') - } - // Default values if not specified. - if (typeof params.useSSL === 'undefined') { - params.useSSL = true - } - if (!params.port) { - params.port = 0 - } - // Validate input params. 
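Note that `pipesetup` earlier in helpers.ts was narrowed from a variadic reducer to exactly two streams, so chaining more than two now takes repeated calls. A minimal sketch using the helpers defined there:

```ts
import * as stream from 'node:stream'
import { pipesetup, readableStream } from './helpers.ts'

const src = readableStream('hello') // one-shot Readable built by the helper
const dst = new stream.PassThrough()

// Errors emitted by src are forwarded to dst; a bare pipe() would not do that.
pipesetup(src, dst).on('data', (chunk) => console.log(chunk.toString()))
```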
- if (!isValidEndpoint(params.endPoint)) { - throw new errors.InvalidEndpointError(`Invalid endPoint : ${params.endPoint}`) - } - if (!isValidPort(params.port)) { - throw new errors.InvalidArgumentError(`Invalid port : ${params.port}`) - } - if (!isBoolean(params.useSSL)) { - throw new errors.InvalidArgumentError( - `Invalid useSSL flag type : ${params.useSSL}, expected to be of type "boolean"`, - ) - } - - // Validate region only if its set. - if (params.region) { - if (!isString(params.region)) { - throw new errors.InvalidArgumentError(`Invalid region : ${params.region}`) - } - } - - var host = params.endPoint.toLowerCase() - var port = params.port - var protocol = '' - var transport - var transportAgent - // Validate if configuration is not using SSL - // for constructing relevant endpoints. - if (params.useSSL === false) { - transport = Http - protocol = 'http:' - if (port === 0) { - port = 80 - } - transportAgent = Http.globalAgent - } else { - // Defaults to secure. - transport = Https - protocol = 'https:' - if (port === 0) { - port = 443 - } - transportAgent = Https.globalAgent - } - - // if custom transport is set, use it. - if (params.transport) { - if (!isObject(params.transport)) { - throw new errors.InvalidArgumentError( - `Invalid transport type : ${params.transport}, expected to be type "object"`, - ) - } - transport = params.transport - } - - // if custom transport agent is set, use it. - if (params.transportAgent) { - if (!isObject(params.transportAgent)) { - throw new errors.InvalidArgumentError( - `Invalid transportAgent type: ${params.transportAgent}, expected to be type "object"`, - ) - } - - transportAgent = params.transportAgent - } - - // User Agent should always following the below style. - // Please open an issue to discuss any new changes here. - // - // MinIO (OS; ARCH) LIB/VER APP/VER - // - var libraryComments = `(${process.platform}; ${process.arch})` - var libraryAgent = `MinIO ${libraryComments} minio-js/${Package.version}` - // User agent block ends. - - this.transport = transport - this.transportAgent = transportAgent - this.host = host - this.port = port - this.protocol = protocol - this.accessKey = params.accessKey - this.secretKey = params.secretKey - this.sessionToken = params.sessionToken - this.userAgent = `${libraryAgent}` - - // Default path style is true - if (params.pathStyle === undefined) { - this.pathStyle = true - } else { - this.pathStyle = params.pathStyle - } - - if (!this.accessKey) { - this.accessKey = '' - } - if (!this.secretKey) { - this.secretKey = '' - } - this.anonymous = !this.accessKey || !this.secretKey - - if (params.credentialsProvider) { - this.credentialsProvider = params.credentialsProvider - this.checkAndRefreshCreds() - } - - this.regionMap = {} - if (params.region) { - this.region = params.region - } - - this.partSize = 64 * 1024 * 1024 - if (params.partSize) { - this.partSize = params.partSize - this.overRidePartSize = true - } - if (this.partSize < 5 * 1024 * 1024) { - throw new errors.InvalidArgumentError(`Part size should be greater than 5MB`) - } - if (this.partSize > 5 * 1024 * 1024 * 1024) { - throw new errors.InvalidArgumentError(`Part size should be less than 5GB`) - } - - this.maximumPartSize = 5 * 1024 * 1024 * 1024 - this.maxObjectSize = 5 * 1024 * 1024 * 1024 * 1024 - // SHA256 is enabled only for authenticated http requests. If the request is authenticated - // and the connection is https we use x-amz-content-sha256=UNSIGNED-PAYLOAD - // header for signature calculation. 
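The comment above pins down when payload hashing actually happens: only authenticated plain-HTTP requests hash the body, authenticated HTTPS requests sign with the literal `UNSIGNED-PAYLOAD` sentinel, and anonymous requests are not signed at all. A sketch of that rule (helper name is illustrative, not part of the client):

```ts
import { createHash } from 'node:crypto'

// Illustrative only: pick the x-amz-content-sha256 value per the rule above.
function contentSha256(anonymous: boolean, useSSL: boolean, payload: string): string | undefined {
  if (anonymous) {
    return undefined // request is not signed at all
  }
  if (useSSL) {
    return 'UNSIGNED-PAYLOAD' // TLS already protects the body
  }
  return createHash('sha256').update(payload).digest('hex')
}
```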
- this.enableSHA256 = !this.anonymous && !params.useSSL - - this.s3AccelerateEndpoint = params.s3AccelerateEndpoint || null - this.reqOptions = {} - } - - // This is s3 Specific and does not hold validity in any other Object storage. - getAccelerateEndPointIfSet(bucketName, objectName) { - if (!_.isEmpty(this.s3AccelerateEndpoint) && !_.isEmpty(bucketName) && !_.isEmpty(objectName)) { - // http://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration.html - // Disable transfer acceleration for non-compliant bucket names. - if (bucketName.indexOf('.') !== -1) { - throw new Error(`Transfer Acceleration is not supported for non compliant bucket:${bucketName}`) - } - // If transfer acceleration is requested set new host. - // For more details about enabling transfer acceleration read here. - // http://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration.html - return this.s3AccelerateEndpoint - } - return false - } - - /** - * @param endPoint _string_ valid S3 acceleration end point - */ - setS3TransferAccelerate(endPoint) { - this.s3AccelerateEndpoint = endPoint - } - - // Sets the supported request options. - setRequestOptions(options) { - if (!isObject(options)) { - throw new TypeError('request options should be of type "object"') - } - this.reqOptions = _.pick(options, [ - 'agent', - 'ca', - 'cert', - 'ciphers', - 'clientCertEngine', - 'crl', - 'dhparam', - 'ecdhCurve', - 'family', - 'honorCipherOrder', - 'key', - 'passphrase', - 'pfx', - 'rejectUnauthorized', - 'secureOptions', - 'secureProtocol', - 'servername', - 'sessionIdContext', - ]) - } - - // returns *options* object that can be used with http.request() - // Takes care of constructing virtual-host-style or path-style hostname - getRequestOptions(opts) { - var method = opts.method - var region = opts.region - var bucketName = opts.bucketName - var objectName = opts.objectName - var headers = opts.headers - var query = opts.query - - var reqOptions = { method } - reqOptions.headers = {} - - // If custom transportAgent was supplied earlier, we'll inject it here - reqOptions.agent = this.transportAgent - - // Verify if virtual host supported. - var virtualHostStyle - if (bucketName) { - virtualHostStyle = isVirtualHostStyle(this.host, this.protocol, bucketName, this.pathStyle) - } - - if (this.port) { - reqOptions.port = this.port - } - reqOptions.protocol = this.protocol - - if (objectName) { - objectName = `${uriResourceEscape(objectName)}` - } - - reqOptions.path = '/' - - // Save host. - reqOptions.host = this.host - // For Amazon S3 endpoint, get endpoint based on region. - if (isAmazonEndpoint(reqOptions.host)) { - const accelerateEndPoint = this.getAccelerateEndPointIfSet(bucketName, objectName) - if (accelerateEndPoint) { - reqOptions.host = `${accelerateEndPoint}` - } else { - reqOptions.host = getS3Endpoint(region) - } - } - - if (virtualHostStyle && !opts.pathStyle) { - // For all hosts which support virtual host style, `bucketName` - // is part of the hostname in the following format: - // - // var host = 'bucketName.example.com' - // - if (bucketName) { - reqOptions.host = `${bucketName}.${reqOptions.host}` - } - if (objectName) { - reqOptions.path = `/${objectName}` - } - } else { - // For all S3 compatible storage services we will fallback to - // path style requests, where `bucketName` is part of the URI - // path. 
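The branch that follows is the path-style fallback; together with the virtual-host branch above it, the addressing difference comes down to where the bucket name lives. A self-contained illustration (hosts are examples only):

```ts
// For bucket "photos" and object "2024/cat.png" on play.min.io:
//   virtual-host-style: https://photos.play.min.io/2024/cat.png
//   path-style:         https://play.min.io/photos/2024/cat.png
function buildTarget(host: string, bucket: string, object: string, virtualHostStyle: boolean) {
  return virtualHostStyle
    ? { host: `${bucket}.${host}`, path: `/${object}` }
    : { host, path: `/${bucket}/${object}` }
}

console.log(buildTarget('play.min.io', 'photos', '2024/cat.png', true))
// -> { host: 'photos.play.min.io', path: '/2024/cat.png' }
```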
-      if (bucketName) {
-        reqOptions.path = `/${bucketName}`
-      }
-      if (objectName) {
-        reqOptions.path = `/${bucketName}/${objectName}`
-      }
-    }
-
-    if (query) {
-      reqOptions.path += `?${query}`
-    }
-    reqOptions.headers.host = reqOptions.host
-    if (
-      (reqOptions.protocol === 'http:' && reqOptions.port !== 80) ||
-      (reqOptions.protocol === 'https:' && reqOptions.port !== 443)
-    ) {
-      reqOptions.headers.host = `${reqOptions.host}:${reqOptions.port}`
-    }
-    reqOptions.headers['user-agent'] = this.userAgent
-    if (headers) {
-      // have all header keys in lower case - to make signing easy
-      _.map(headers, (v, k) => (reqOptions.headers[k.toLowerCase()] = v))
-    }
-
-    // Use any request option specified in minioClient.setRequestOptions()
-    reqOptions = Object.assign({}, this.reqOptions, reqOptions)
-
-    return reqOptions
-  }
-
-  // Set application specific information.
-  //
-  // Generates User-Agent in the following style.
-  //
-  //       MinIO (OS; ARCH) LIB/VER APP/VER
-  //
-  // __Arguments__
-  // * `appName` _string_ - Application name.
-  // * `appVersion` _string_ - Application version.
-  setAppInfo(appName, appVersion) {
-    if (!isString(appName)) {
-      throw new TypeError(`Invalid appName: ${appName}`)
-    }
-    if (appName.trim() === '') {
-      throw new errors.InvalidArgumentError('Input appName cannot be empty.')
-    }
-    if (!isString(appVersion)) {
-      throw new TypeError(`Invalid appVersion: ${appVersion}`)
-    }
-    if (appVersion.trim() === '') {
-      throw new errors.InvalidArgumentError('Input appVersion cannot be empty.')
-    }
-    this.userAgent = `${this.userAgent} ${appName}/${appVersion}`
-  }
-
-  // Calculate part size given the object size. Part size will be at least this.partSize
-  calculatePartSize(size) {
-    if (!isNumber(size)) {
-      throw new TypeError('size should be of type "number"')
-    }
-    if (size > this.maxObjectSize) {
-      throw new TypeError(`size should not be more than ${this.maxObjectSize}`)
-    }
-    if (this.overRidePartSize) {
-      return this.partSize
-    }
-    var partSize = this.partSize
-    for (;;) {
-      // while(true) {...} throws linting error.
-      // If partSize is big enough to accommodate the object size, then use it.
-      if (partSize * 10000 > size) {
-        return partSize
-      }
-      // Try part sizes as 64MB, 80MB, 96MB etc.
-      partSize += 16 * 1024 * 1024
-    }
-  }
-
-  // log the request, response, error
-  logHTTP(reqOptions, response, err) {
-    // if no log streamer is available, return.
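Stepping back to the `calculatePartSize` loop above: it is easier to see with numbers. A standalone restatement for illustration (the real method also honors `overRidePartSize` and the configured base size):

```ts
const MiB = 1024 * 1024

// Grow in 16 MiB steps from 64 MiB until 10,000 parts can cover the object.
function calculatePartSize(size: number): number {
  let partSize = 64 * MiB
  while (partSize * 10000 <= size) {
    partSize += 16 * MiB
  }
  return partSize
}

// A 1 TiB object needs parts larger than ~104.9 MiB, so the loop steps
// through 64, 80, 96 and lands on 112 MiB:
console.log(calculatePartSize(1024 * 1024 * MiB) / MiB) // -> 112
```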
- if (!this.logStream) { - return - } - if (!isObject(reqOptions)) { - throw new TypeError('reqOptions should be of type "object"') - } - if (response && !isReadableStream(response)) { - throw new TypeError('response should be of type "Stream"') - } - if (err && !(err instanceof Error)) { - throw new TypeError('err should be of type "Error"') - } - var logHeaders = (headers) => { - _.forEach(headers, (v, k) => { - if (k == 'authorization') { - var redacter = new RegExp('Signature=([0-9a-f]+)') - v = v.replace(redacter, 'Signature=**REDACTED**') - } - this.logStream.write(`${k}: ${v}\n`) - }) - this.logStream.write('\n') - } - this.logStream.write(`REQUEST: ${reqOptions.method} ${reqOptions.path}\n`) - logHeaders(reqOptions.headers) - if (response) { - this.logStream.write(`RESPONSE: ${response.statusCode}\n`) - logHeaders(response.headers) - } - if (err) { - this.logStream.write('ERROR BODY:\n') - var errJSON = JSON.stringify(err, null, '\t') - this.logStream.write(`${errJSON}\n`) - } - } - - // Enable tracing - traceOn(stream) { - if (!stream) { - stream = process.stdout - } - this.logStream = stream - } - - // Disable tracing - traceOff() { - this.logStream = null - } - - // makeRequest is the primitive used by the apis for making S3 requests. - // payload can be empty string in case of no payload. - // statusCode is the expected statusCode. If response.statusCode does not match - // we parse the XML error and call the callback with the error message. - // A valid region is passed by the calls - listBuckets, makeBucket and - // getBucketRegion. - makeRequest(options, payload, statusCodes, region, returnResponse, cb) { - if (!isObject(options)) { - throw new TypeError('options should be of type "object"') - } - if (!isString(payload) && !isObject(payload)) { - // Buffer is of type 'object' - throw new TypeError('payload should be of type "string" or "Buffer"') - } - statusCodes.forEach((statusCode) => { - if (!isNumber(statusCode)) { - throw new TypeError('statusCode should be of type "number"') - } - }) - if (!isString(region)) { - throw new TypeError('region should be of type "string"') - } - if (!isBoolean(returnResponse)) { - throw new TypeError('returnResponse should be of type "boolean"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - if (!options.headers) { - options.headers = {} - } - if (options.method === 'POST' || options.method === 'PUT' || options.method === 'DELETE') { - options.headers['content-length'] = payload.length - } - var sha256sum = '' - if (this.enableSHA256) { - sha256sum = toSha256(payload) - } - var stream = readableStream(payload) - this.makeRequestStream(options, stream, sha256sum, statusCodes, region, returnResponse, cb) - } - - // makeRequestStream will be used directly instead of makeRequest in case the payload - // is available as a stream. for ex. 
putObject - makeRequestStream(options, stream, sha256sum, statusCodes, region, returnResponse, cb) { - if (!isObject(options)) { - throw new TypeError('options should be of type "object"') - } - if (!isReadableStream(stream)) { - throw new errors.InvalidArgumentError('stream should be a readable Stream') - } - if (!isString(sha256sum)) { - throw new TypeError('sha256sum should be of type "string"') - } - statusCodes.forEach((statusCode) => { - if (!isNumber(statusCode)) { - throw new TypeError('statusCode should be of type "number"') - } - }) - if (!isString(region)) { - throw new TypeError('region should be of type "string"') - } - if (!isBoolean(returnResponse)) { - throw new TypeError('returnResponse should be of type "boolean"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - // sha256sum will be empty for anonymous or https requests - if (!this.enableSHA256 && sha256sum.length !== 0) { - throw new errors.InvalidArgumentError(`sha256sum expected to be empty for anonymous or https requests`) - } - // sha256sum should be valid for non-anonymous http requests. - if (this.enableSHA256 && sha256sum.length !== 64) { - throw new errors.InvalidArgumentError(`Invalid sha256sum : ${sha256sum}`) - } - - var _makeRequest = (e, region) => { - if (e) { - return cb(e) - } - options.region = region - var reqOptions = this.getRequestOptions(options) - if (!this.anonymous) { - // For non-anonymous https requests sha256sum is 'UNSIGNED-PAYLOAD' for signature calculation. - if (!this.enableSHA256) { - sha256sum = 'UNSIGNED-PAYLOAD' - } - - let date = new Date() - - reqOptions.headers['x-amz-date'] = makeDateLong(date) - reqOptions.headers['x-amz-content-sha256'] = sha256sum - if (this.sessionToken) { - reqOptions.headers['x-amz-security-token'] = this.sessionToken - } - - this.checkAndRefreshCreds() - var authorization = signV4(reqOptions, this.accessKey, this.secretKey, region, date) - reqOptions.headers.authorization = authorization - } - var req = this.transport.request(reqOptions, (response) => { - if (!statusCodes.includes(response.statusCode)) { - // For an incorrect region, S3 server always sends back 400. - // But we will do cache invalidation for all errors so that, - // in future, if AWS S3 decides to send a different status code or - // XML error code we will still work fine. - delete this.regionMap[options.bucketName] - var errorTransformer = transformers.getErrorTransformer(response) - pipesetup(response, errorTransformer).on('error', (e) => { - this.logHTTP(reqOptions, response, e) - cb(e) - }) - return - } - this.logHTTP(reqOptions, response) - if (returnResponse) { - return cb(null, response) - } - // We drain the socket so that the connection gets closed. Note that this - // is not expensive as the socket will not have any data. - response.on('data', () => {}) - cb(null) - }) - let pipe = pipesetup(stream, req) - pipe.on('error', (e) => { - this.logHTTP(reqOptions, null, e) - cb(e) - }) - } - if (region) { - return _makeRequest(null, region) - } - this.getBucketRegion(options.bucketName, _makeRequest) - } - - // gets the region of the bucket - getBucketRegion(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError(`Invalid bucket name : ${bucketName}`) - } - if (!isFunction(cb)) { - throw new TypeError('cb should be of type "function"') - } - - // Region is set with constructor, return the region right here. 
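`getBucketRegion` below resolves in a fixed order: the region pinned in the constructor, then the per-bucket cache (which `makeRequest` invalidates on any error), then a `GET ?location` call. A condensed model of that lookup, where `fetchLocation` stands in for the actual request and `us-east-1` is the library's `DEFAULT_REGION`:

```ts
const regionMap = new Map<string, string>()

// Condensed model: constructor region -> cache -> remote lookup.
async function getBucketRegion(
  bucket: string,
  configuredRegion: string | undefined,
  fetchLocation: (bucket: string) => Promise<string>,
): Promise<string> {
  if (configuredRegion) {
    return configuredRegion
  }
  const cached = regionMap.get(bucket)
  if (cached) {
    return cached
  }
  const region = (await fetchLocation(bucket)) || 'us-east-1'
  regionMap.set(bucket, region) // dropped again if a later request fails
  return region
}
```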
- if (this.region) { - return cb(null, this.region) - } - - if (this.regionMap[bucketName]) { - return cb(null, this.regionMap[bucketName]) - } - var extractRegion = (response) => { - var transformer = transformers.getBucketRegionTransformer() - var region = DEFAULT_REGION - pipesetup(response, transformer) - .on('error', cb) - .on('data', (data) => { - if (data) { - region = data - } - }) - .on('end', () => { - this.regionMap[bucketName] = region - cb(null, region) - }) - } - - var method = 'GET' - var query = 'location' - - // `getBucketLocation` behaves differently in following ways for - // different environments. - // - // - For nodejs env we default to path style requests. - // - For browser env path style requests on buckets yields CORS - // error. To circumvent this problem we make a virtual host - // style request signed with 'us-east-1'. This request fails - // with an error 'AuthorizationHeaderMalformed', additionally - // the error XML also provides Region of the bucket. To validate - // this region is proper we retry the same request with the newly - // obtained region. - var pathStyle = this.pathStyle && typeof window === 'undefined' - - this.makeRequest({ method, bucketName, query, pathStyle }, '', [200], DEFAULT_REGION, true, (e, response) => { - if (e) { - if (e.name === 'AuthorizationHeaderMalformed') { - var region = e.Region - if (!region) { - return cb(e) - } - this.makeRequest({ method, bucketName, query }, '', [200], region, true, (e, response) => { - if (e) { - return cb(e) - } - extractRegion(response) - }) - return - } - return cb(e) - } - extractRegion(response) - }) - } - - // Creates the bucket `bucketName`. - // - // __Arguments__ - // * `bucketName` _string_ - Name of the bucket - // * `region` _string_ - region valid values are _us-west-1_, _us-west-2_, _eu-west-1_, _eu-central-1_, _ap-southeast-1_, _ap-northeast-1_, _ap-southeast-2_, _sa-east-1_. - // * `makeOpts` _object_ - Options to create a bucket. e.g {ObjectLocking:true} (Optional) - // * `callback(err)` _function_ - callback function with `err` as the error argument. `err` is null if the bucket is successfully created. - makeBucket(bucketName, region, makeOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - // Backward Compatibility - if (isObject(region)) { - cb = makeOpts - makeOpts = region - region = '' - } - if (isFunction(region)) { - cb = region - region = '' - makeOpts = {} - } - if (isFunction(makeOpts)) { - cb = makeOpts - makeOpts = {} - } - - if (!isString(region)) { - throw new TypeError('region should be of type "string"') - } - if (!isObject(makeOpts)) { - throw new TypeError('makeOpts should be of type "object"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - var payload = '' - - // Region already set in constructor, validate if - // caller requested bucket location is same. - if (region && this.region) { - if (region !== this.region) { - throw new errors.InvalidArgumentError(`Configured region ${this.region}, requested ${region}`) - } - } - // sending makeBucket request with XML containing 'us-east-1' fails. 
For - // default region server expects the request without body - if (region && region !== DEFAULT_REGION) { - var createBucketConfiguration = [] - createBucketConfiguration.push({ - _attr: { - xmlns: 'http://s3.amazonaws.com/doc/2006-03-01/', - }, - }) - createBucketConfiguration.push({ - LocationConstraint: region, - }) - var payloadObject = { - CreateBucketConfiguration: createBucketConfiguration, - } - payload = Xml(payloadObject) - } - var method = 'PUT' - var headers = {} - - if (makeOpts.ObjectLocking) { - headers['x-amz-bucket-object-lock-enabled'] = true - } - - if (!region) { - region = DEFAULT_REGION - } - - const processWithRetry = (err) => { - if (err && (region === '' || region === DEFAULT_REGION)) { - if (err.code === 'AuthorizationHeaderMalformed' && err.region !== '') { - // Retry with region returned as part of error - this.makeRequest({ method, bucketName, headers }, payload, [200], err.region, false, cb) - } else { - return cb && cb(err) - } - } - return cb && cb(err) - } - this.makeRequest({ method, bucketName, headers }, payload, [200], region, false, processWithRetry) - } - - // List of buckets created. - // - // __Arguments__ - // * `callback(err, buckets)` _function_ - callback function with error as the first argument. `buckets` is an array of bucket information - // - // `buckets` array element: - // * `bucket.name` _string_ : bucket name - // * `bucket.creationDate` _Date_: date when bucket was created - listBuckets(cb) { - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - var method = 'GET' - this.makeRequest({ method }, '', [200], DEFAULT_REGION, true, (e, response) => { - if (e) { - return cb(e) - } - var transformer = transformers.getListBucketTransformer() - var buckets - pipesetup(response, transformer) - .on('data', (result) => (buckets = result)) - .on('error', (e) => cb(e)) - .on('end', () => cb(null, buckets)) - }) - } - - // Returns a stream that emits objects that are partially uploaded. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `prefix` _string_: prefix of the object names that are partially uploaded (optional, default `''`) - // * `recursive` _bool_: directory style listing when false, recursive listing when true (optional, default `false`) - // - // __Return Value__ - // * `stream` _Stream_ : emits objects of the format: - // * `object.key` _string_: name of the object - // * `object.uploadId` _string_: upload ID of the object - // * `object.size` _Integer_: size of the partially uploaded object - listIncompleteUploads(bucket, prefix, recursive) { - if (prefix === undefined) { - prefix = '' - } - if (recursive === undefined) { - recursive = false - } - if (!isValidBucketName(bucket)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucket) - } - if (!isValidPrefix(prefix)) { - throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) - } - if (!isBoolean(recursive)) { - throw new TypeError('recursive should be of type "boolean"') - } - var delimiter = recursive ? 
'' : '/' - var keyMarker = '' - var uploadIdMarker = '' - var uploads = [] - var ended = false - var readStream = Stream.Readable({ objectMode: true }) - readStream._read = () => { - // push one upload info per _read() - if (uploads.length) { - return readStream.push(uploads.shift()) - } - if (ended) { - return readStream.push(null) - } - this.listIncompleteUploadsQuery(bucket, prefix, keyMarker, uploadIdMarker, delimiter) - .on('error', (e) => readStream.emit('error', e)) - .on('data', (result) => { - result.prefixes.forEach((prefix) => uploads.push(prefix)) - async.eachSeries( - result.uploads, - (upload, cb) => { - // for each incomplete upload add the sizes of its uploaded parts - this.listParts(bucket, upload.key, upload.uploadId, (err, parts) => { - if (err) { - return cb(err) - } - upload.size = parts.reduce((acc, item) => acc + item.size, 0) - uploads.push(upload) - cb() - }) - }, - (err) => { - if (err) { - readStream.emit('error', err) - return - } - if (result.isTruncated) { - keyMarker = result.nextKeyMarker - uploadIdMarker = result.nextUploadIdMarker - } else { - ended = true - } - readStream._read() - }, - ) - }) - } - return readStream - } - - // To check if a bucket already exists. - // - // __Arguments__ - // * `bucketName` _string_ : name of the bucket - // * `callback(err)` _function_ : `err` is `null` if the bucket exists - bucketExists(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - var method = 'HEAD' - this.makeRequest({ method, bucketName }, '', [200], '', false, (err) => { - if (err) { - if (err.code == 'NoSuchBucket' || err.code == 'NotFound') { - return cb(null, false) - } - return cb(err) - } - cb(null, true) - }) - } - - // Remove a bucket. - // - // __Arguments__ - // * `bucketName` _string_ : name of the bucket - // * `callback(err)` _function_ : `err` is `null` if the bucket is removed successfully. - removeBucket(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - var method = 'DELETE' - this.makeRequest({ method, bucketName }, '', [204], '', false, (e) => { - // If the bucket was successfully removed, remove the region map entry. - if (!e) { - delete this.regionMap[bucketName] - } - cb(e) - }) - } - - // Remove the partially uploaded object. 
- // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `callback(err)` _function_: callback function is called with non `null` value in case of error - removeIncompleteUpload(bucketName, objectName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.IsValidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - var removeUploadId - async.during( - (cb) => { - this.findUploadId(bucketName, objectName, (e, uploadId) => { - if (e) { - return cb(e) - } - removeUploadId = uploadId - cb(null, uploadId) - }) - }, - (cb) => { - var method = 'DELETE' - var query = `uploadId=${removeUploadId}` - this.makeRequest({ method, bucketName, objectName, query }, '', [204], '', false, (e) => cb(e)) - }, - cb, - ) - } - - // Callback is called with `error` in case of error or `null` in case of success - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `filePath` _string_: path to which the object data will be written to - // * `getOpts` _object_: Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional) - // * `callback(err)` _function_: callback is called with `err` in case of error. - fGetObject(bucketName, objectName, filePath, getOpts = {}, cb) { - // Input validation. - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isString(filePath)) { - throw new TypeError('filePath should be of type "string"') - } - // Backward Compatibility - if (isFunction(getOpts)) { - cb = getOpts - getOpts = {} - } - - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - // Internal data. - var partFile - var partFileStream - var objStat - - // Rename wrapper. - var rename = (err) => { - if (err) { - return cb(err) - } - fs.rename(partFile, filePath, cb) - } - - async.waterfall( - [ - (cb) => this.statObject(bucketName, objectName, getOpts, cb), - (result, cb) => { - objStat = result - // Create any missing top level directories. - mkdirp(path.dirname(filePath), cb) - }, - (ignore, cb) => { - partFile = `${filePath}.${objStat.etag}.part.minio` - fs.stat(partFile, (e, stats) => { - var offset = 0 - if (e) { - partFileStream = fs.createWriteStream(partFile, { flags: 'w' }) - } else { - if (objStat.size === stats.size) { - return rename() - } - offset = stats.size - partFileStream = fs.createWriteStream(partFile, { flags: 'a' }) - } - this.getPartialObject(bucketName, objectName, offset, 0, getOpts, cb) - }) - }, - (downloadStream, cb) => { - pipesetup(downloadStream, partFileStream) - .on('error', (e) => cb(e)) - .on('finish', cb) - }, - (cb) => fs.stat(partFile, cb), - (stats, cb) => { - if (stats.size === objStat.size) { - return cb() - } - cb(new Error('Size mismatch between downloaded file and the object')) - }, - ], - rename, - ) - } - - // Callback is called with readable stream of the object content. 
-  //
-  // __Arguments__
-  // * `bucketName` _string_: name of the bucket
-  // * `objectName` _string_: name of the object
-  // * `getOpts` _object_: Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional)
-  // * `callback(err, stream)` _function_: callback is called with `err` in case of error. `stream` is the object content stream
-  getObject(bucketName, objectName, getOpts = {}, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isValidObjectName(objectName)) {
-      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
-    }
-    // Backward Compatibility
-    if (isFunction(getOpts)) {
-      cb = getOpts
-      getOpts = {}
-    }
-
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-    this.getPartialObject(bucketName, objectName, 0, 0, getOpts, cb)
-  }
-
-  // Callback is called with readable stream of the partial object content.
-  //
-  // __Arguments__
-  // * `bucketName` _string_: name of the bucket
-  // * `objectName` _string_: name of the object
-  // * `offset` _number_: offset of the object from where the stream will start
-  // * `length` _number_: length of the object that will be read in the stream (optional, if not specified we read the rest of the file from the offset)
-  // * `getOpts` _object_: Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional)
-  // * `callback(err, stream)` _function_: callback is called with `err` in case of error. `stream` is the object content stream
-  getPartialObject(bucketName, objectName, offset, length, getOpts = {}, cb) {
-    if (isFunction(length)) {
-      cb = length
-      length = 0
-    }
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isValidObjectName(objectName)) {
-      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
-    }
-    if (!isNumber(offset)) {
-      throw new TypeError('offset should be of type "number"')
-    }
-    if (!isNumber(length)) {
-      throw new TypeError('length should be of type "number"')
-    }
-    // Backward Compatibility
-    if (isFunction(getOpts)) {
-      cb = getOpts
-      getOpts = {}
-    }
-
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-
-    var range = ''
-    if (offset || length) {
-      if (offset) {
-        range = `bytes=${+offset}-`
-      } else {
-        range = 'bytes=0-'
-        offset = 0
-      }
-      if (length) {
-        range += `${+length + offset - 1}`
-      }
-    }
-
-    var headers = {}
-    if (range !== '') {
-      headers.range = range
-    }
-
-    var expectedStatusCodes = [200]
-    if (range) {
-      expectedStatusCodes.push(206)
-    }
-    var method = 'GET'
-
-    var query = querystring.stringify(getOpts)
-    this.makeRequest({ method, bucketName, objectName, headers, query }, '', expectedStatusCodes, '', true, cb)
-  }
-
-  // Uploads the object using contents from a file
-  //
-  // __Arguments__
-  // * `bucketName` _string_: name of the bucket
-  // * `objectName` _string_: name of the object
-  // * `filePath` _string_: file path of the file to be uploaded
-  // * `metaData` _Javascript Object_: metaData associated with the object
-  // * `callback(err, objInfo)` _function_: non null `err` indicates error, `objInfo` _object_ which contains versionId and etag.
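The `getPartialObject` code above folds `offset` and `length` into an HTTP `Range` header. Extracted into a pure function for clarity (behavior copied from the method; the function name is hypothetical):

```ts
// Build the Range header the way getPartialObject does.
function rangeHeader(offset: number, length: number): string {
  if (!offset && !length) {
    return '' // whole object, no Range header
  }
  let range = offset ? `bytes=${+offset}-` : 'bytes=0-'
  if (length) {
    range += `${+length + (offset || 0) - 1}` // end index is inclusive
  }
  return range
}

console.log(rangeHeader(10, 30)) // -> 'bytes=10-39'
console.log(rangeHeader(10, 0)) // -> 'bytes=10-' (rest of the object)
console.log(rangeHeader(0, 30)) // -> 'bytes=0-29'
```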
- fPutObject(bucketName, objectName, filePath, metaData, callback) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - - if (!isString(filePath)) { - throw new TypeError('filePath should be of type "string"') - } - if (isFunction(metaData)) { - callback = metaData - metaData = {} // Set metaData empty if no metaData provided. - } - if (!isObject(metaData)) { - throw new TypeError('metaData should be of type "object"') - } - - // Inserts correct `content-type` attribute based on metaData and filePath - metaData = insertContentType(metaData, filePath) - - // Updates metaData to have the correct prefix if needed - metaData = prependXAMZMeta(metaData) - var size - var partSize - - async.waterfall( - [ - (cb) => fs.stat(filePath, cb), - (stats, cb) => { - size = stats.size - var stream - var cbTriggered = false - var origCb = cb - cb = function () { - if (cbTriggered) { - return - } - cbTriggered = true - if (stream) { - stream.destroy() - } - return origCb.apply(this, arguments) - } - if (size > this.maxObjectSize) { - return cb(new Error(`${filePath} size : ${stats.size}, max allowed size : 5TB`)) - } - if (size <= this.partSize) { - // simple PUT request, no multipart - var multipart = false - var uploader = this.getUploader(bucketName, objectName, metaData, multipart) - var hash = transformers.getHashSummer(this.enableSHA256) - var start = 0 - var end = size - 1 - var autoClose = true - if (size === 0) { - end = 0 - } - var options = { start, end, autoClose } - pipesetup(fs.createReadStream(filePath, options), hash) - .on('data', (data) => { - var md5sum = data.md5sum - var sha256sum = data.sha256sum - stream = fs.createReadStream(filePath, options) - uploader(stream, size, sha256sum, md5sum, (err, objInfo) => { - callback(err, objInfo) - cb(true) - }) - }) - .on('error', (e) => cb(e)) - return - } - this.findUploadId(bucketName, objectName, cb) - }, - (uploadId, cb) => { - // if there was a previous incomplete upload, fetch all its uploaded parts info - if (uploadId) { - return this.listParts(bucketName, objectName, uploadId, (e, etags) => cb(e, uploadId, etags)) - } - // there was no previous upload, initiate a new one - this.initiateNewMultipartUpload(bucketName, objectName, metaData, (e, uploadId) => cb(e, uploadId, [])) - }, - (uploadId, etags, cb) => { - partSize = this.calculatePartSize(size) - var multipart = true - var uploader = this.getUploader(bucketName, objectName, metaData, multipart) - - // convert array to object to make things easy - var parts = etags.reduce(function (acc, item) { - if (!acc[item.part]) { - acc[item.part] = item - } - return acc - }, {}) - var partsDone = [] - var partNumber = 1 - var uploadedSize = 0 - async.whilst( - (cb) => { - cb(null, uploadedSize < size) - }, - (cb) => { - var stream - var cbTriggered = false - var origCb = cb - cb = function () { - if (cbTriggered) { - return - } - cbTriggered = true - if (stream) { - stream.destroy() - } - return origCb.apply(this, arguments) - } - var part = parts[partNumber] - var hash = transformers.getHashSummer(this.enableSHA256) - var length = partSize - if (length > size - uploadedSize) { - length = size - uploadedSize - } - var start = uploadedSize - var end = uploadedSize + length - 1 - var autoClose = true - var options = { autoClose, start, end } - // verify md5sum of each part - 
pipesetup(fs.createReadStream(filePath, options), hash)
-                .on('data', (data) => {
-                  var md5sumHex = Buffer.from(data.md5sum, 'base64').toString('hex')
-                  if (part && md5sumHex === part.etag) {
-                    // md5 matches, chunk already uploaded
-                    partsDone.push({ part: partNumber, etag: part.etag })
-                    partNumber++
-                    uploadedSize += length
-                    return cb()
-                  }
-                  // part is not uploaded yet, or md5 mismatch
-                  stream = fs.createReadStream(filePath, options)
-                  uploader(uploadId, partNumber, stream, length, data.sha256sum, data.md5sum, (e, objInfo) => {
-                    if (e) {
-                      return cb(e)
-                    }
-                    partsDone.push({ part: partNumber, etag: objInfo.etag })
-                    partNumber++
-                    uploadedSize += length
-                    return cb()
-                  })
-                })
-                .on('error', (e) => cb(e))
-            },
-            (e) => {
-              if (e) {
-                return cb(e)
-              }
-              cb(null, partsDone, uploadId)
-            },
-          )
-        },
-        // all parts uploaded, complete the multipart upload
-        (etags, uploadId, cb) => this.completeMultipartUpload(bucketName, objectName, uploadId, etags, cb),
-      ],
-      (err, ...rest) => {
-        if (err === true) {
-          return
-        }
-        callback(err, ...rest)
-      },
-    )
-  }
-
-  // Uploads the object.
-  //
-  // Uploading a stream
-  // __Arguments__
-  // * `bucketName` _string_: name of the bucket
-  // * `objectName` _string_: name of the object
-  // * `stream` _Stream_: Readable stream
-  // * `size` _number_: size of the object (optional)
-  // * `callback(err, etag)` _function_: non null `err` indicates error, `etag` _string_ is the etag of the object uploaded.
-  //
-  // Uploading "Buffer" or "string"
-  // __Arguments__
-  // * `bucketName` _string_: name of the bucket
-  // * `objectName` _string_: name of the object
-  // * `string or Buffer` _string_ or _Buffer_: string or buffer
-  // * `callback(err, objInfo)` _function_: `err` is `null` in case of success and `objInfo` will have the following object details:
-  //   * `etag` _string_: etag of the object
-  //   * `versionId` _string_: versionId of the object
-  putObject(bucketName, objectName, stream, size, metaData, callback) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isValidObjectName(objectName)) {
-      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
-    }
-
-    // We'll need to shift arguments to the left because of size and metaData.
-    if (isFunction(size)) {
-      callback = size
-      metaData = {}
-    } else if (isFunction(metaData)) {
-      callback = metaData
-      metaData = {}
-    }
-
-    // We'll need to shift arguments to the left because of metaData
-    // and size being optional.
-    if (isObject(size)) {
-      metaData = size
-    }
-
-    // Ensures Metadata has appropriate prefix for S3 API
-    metaData = prependXAMZMeta(metaData)
-    if (typeof stream === 'string' || stream instanceof Buffer) {
-      // Adapts the non-stream interface into a stream.
-      size = stream.length
-      stream = readableStream(stream)
-    } else if (!isReadableStream(stream)) {
-      throw new TypeError('third argument should be of type "stream.Readable" or "Buffer" or "string"')
-    }
-
-    if (!isFunction(callback)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-
-    if (isNumber(size) && size < 0) {
-      throw new errors.InvalidArgumentError(`size cannot be negative, given size: ${size}`)
-    }
-
-    // Get the part size and forward that to the BlockStream. Default to the
-    // largest block size possible if necessary.
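The comment above is the heart of `putObject`'s streaming path: every part except the last must be exactly the computed part size, so the source stream is re-chunked before upload. A toy version of that `stream => chunker => uploader` idea (the real code uses `block-stream2` and `ObjectUploader`):

```ts
import { Writable } from 'node:stream'

// Buffer incoming data into fixed-size parts; only the final part may be short.
function chunkingSink(partSize: number, uploadPart: (part: Buffer) => void): Writable {
  let pending = Buffer.alloc(0)
  return new Writable({
    write(chunk: Buffer, _encoding, done) {
      pending = Buffer.concat([pending, chunk])
      while (pending.length >= partSize) {
        uploadPart(pending.subarray(0, partSize))
        pending = pending.subarray(partSize)
      }
      done()
    },
    final(done) {
      if (pending.length > 0) {
        uploadPart(pending) // trailing short part
      }
      done()
    },
  })
}

// Readable.from(['abcdefgh']).pipe(chunkingSink(3, (p) => console.log(p.toString())))
// -> "abc", "def", "gh"
```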
-    if (!isNumber(size)) {
-      size = this.maxObjectSize
-    }
-
-    size = this.calculatePartSize(size)
-
-    // s3 requires that all non-end chunks be at least `this.partSize`,
-    // so we chunk the stream until we hit either that size or the end before
-    // we flush it to s3.
-    let chunker = new BlockStream2({ size, zeroPadding: false })
-
-    // This is a Writable stream that can be written to in order to upload
-    // to the specified bucket and object automatically.
-    let uploader = new ObjectUploader(this, bucketName, objectName, size, metaData, callback)
-    // stream => chunker => uploader
-    pipesetup(stream, chunker, uploader)
-  }
-
-  // Copy the object.
-  //
-  // __Arguments__
-  // * `bucketName` _string_: name of the bucket
-  // * `objectName` _string_: name of the object
-  // * `srcObject` _string_: path of the source object to be copied
-  // * `conditions` _CopyConditions_: copy conditions that need to be satisfied (optional, default `null`)
-  // * `callback(err, {etag, lastModified})` _function_: non null `err` indicates error, `etag` _string_ and `lastModified` _Date_ are respectively the etag and the last modified date of the newly copied object
-  copyObjectV1(arg1, arg2, arg3, arg4, arg5) {
-    var bucketName = arg1
-    var objectName = arg2
-    var srcObject = arg3
-    var conditions, cb
-    if (typeof arg4 == 'function' && arg5 === undefined) {
-      conditions = null
-      cb = arg4
-    } else {
-      conditions = arg4
-      cb = arg5
-    }
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isValidObjectName(objectName)) {
-      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
-    }
-    if (!isString(srcObject)) {
-      throw new TypeError('srcObject should be of type "string"')
-    }
-    if (srcObject === '') {
-      throw new errors.InvalidPrefixError(`Empty source prefix`)
-    }
-
-    if (conditions !== null && !(conditions instanceof CopyConditions)) {
-      throw new TypeError('conditions should be of type "CopyConditions"')
-    }
-
-    var headers = {}
-    headers['x-amz-copy-source'] = uriResourceEscape(srcObject)
-
-    if (conditions !== null) {
-      if (conditions.modified !== '') {
-        headers['x-amz-copy-source-if-modified-since'] = conditions.modified
-      }
-      if (conditions.unmodified !== '') {
-        headers['x-amz-copy-source-if-unmodified-since'] = conditions.unmodified
-      }
-      if (conditions.matchETag !== '') {
-        headers['x-amz-copy-source-if-match'] = conditions.matchETag
-      }
-      if (conditions.matchEtagExcept !== '') {
-        headers['x-amz-copy-source-if-none-match'] = conditions.matchETagExcept
-      }
-    }
-
-    var method = 'PUT'
-    this.makeRequest({ method, bucketName, objectName, headers }, '', [200], '', true, (e, response) => {
-      if (e) {
-        return cb(e)
-      }
-      var transformer = transformers.getCopyObjectTransformer()
-      pipesetup(response, transformer)
-        .on('error', (e) => cb(e))
-        .on('data', (data) => cb(null, data))
-    })
-  }
-
-  /**
-   * Internal Method to perform copy of an object.
-   * @param sourceConfig __object__ instance of CopySourceOptions @link ./helpers/CopySourceOptions
-   * @param destConfig __object__ instance of CopyDestinationOptions @link ./helpers/CopyDestinationOptions
-   * @param cb __function__ called with null if there is an error
-   * @returns Promise if no callback is passed.
- */ - copyObjectV2(sourceConfig, destConfig, cb) { - if (!(sourceConfig instanceof CopySourceOptions)) { - throw new errors.InvalidArgumentError('sourceConfig should of type CopySourceOptions ') - } - if (!(destConfig instanceof CopyDestinationOptions)) { - throw new errors.InvalidArgumentError('destConfig should of type CopyDestinationOptions ') - } - if (!destConfig.validate()) { - return false - } - if (!destConfig.validate()) { - return false - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - const headers = Object.assign({}, sourceConfig.getHeaders(), destConfig.getHeaders()) - - const bucketName = destConfig.Bucket - const objectName = destConfig.Object - - const method = 'PUT' - this.makeRequest({ method, bucketName, objectName, headers }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - const transformer = transformers.getCopyObjectTransformer() - pipesetup(response, transformer) - .on('error', (e) => cb(e)) - .on('data', (data) => { - const resHeaders = response.headers - - const copyObjResponse = { - Bucket: destConfig.Bucket, - Key: destConfig.Object, - LastModified: data.LastModified, - MetaData: extractMetadata(resHeaders), - VersionId: getVersionId(resHeaders), - SourceVersionId: getSourceVersionId(resHeaders), - Etag: sanitizeETag(resHeaders.etag), - Size: +resHeaders['content-length'], - } - - return cb(null, copyObjResponse) - }) - }) - } - - // Backward compatibility for Copy Object API. - copyObject(...allArgs) { - if (allArgs[0] instanceof CopySourceOptions && allArgs[1] instanceof CopyDestinationOptions) { - return this.copyObjectV2(...arguments) - } - return this.copyObjectV1(...arguments) - } - - // list a batch of objects - listObjectsQuery(bucketName, prefix, marker, listQueryOpts = {}) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isString(prefix)) { - throw new TypeError('prefix should be of type "string"') - } - if (!isString(marker)) { - throw new TypeError('marker should be of type "string"') - } - let { Delimiter, MaxKeys, IncludeVersion } = listQueryOpts - - if (!isObject(listQueryOpts)) { - throw new TypeError('listQueryOpts should be of type "object"') - } - - if (!isString(Delimiter)) { - throw new TypeError('Delimiter should be of type "string"') - } - if (!isNumber(MaxKeys)) { - throw new TypeError('MaxKeys should be of type "number"') - } - - const queries = [] - // escape every value in query string, except maxKeys - queries.push(`prefix=${uriEscape(prefix)}`) - queries.push(`delimiter=${uriEscape(Delimiter)}`) - queries.push(`encoding-type=url`) - - if (IncludeVersion) { - queries.push(`versions`) - } - - if (marker) { - marker = uriEscape(marker) - if (IncludeVersion) { - queries.push(`key-marker=${marker}`) - } else { - queries.push(`marker=${marker}`) - } - } - - // no need to escape maxKeys - if (MaxKeys) { - if (MaxKeys >= 1000) { - MaxKeys = 1000 - } - queries.push(`max-keys=${MaxKeys}`) - } - queries.sort() - var query = '' - if (queries.length > 0) { - query = `${queries.join('&')}` - } - - var method = 'GET' - var transformer = transformers.getListObjectsTransformer() - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return transformer.emit('error', e) - } - pipesetup(response, transformer) - }) - return transformer - } - - // List the objects in the bucket. 
- // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `prefix` _string_: the prefix of the objects that should be listed (optional, default `''`) - // * `recursive` _bool_: `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. (optional, default `false`) - // * `listOpts _object_: query params to list object with below keys - // * listOpts.MaxKeys _int_ maximum number of keys to return - // * listOpts.IncludeVersion _bool_ true|false to include versions. - // __Return Value__ - // * `stream` _Stream_: stream emitting the objects in the bucket, the object is of the format: - // * `obj.name` _string_: name of the object - // * `obj.prefix` _string_: name of the object prefix - // * `obj.size` _number_: size of the object - // * `obj.etag` _string_: etag of the object - // * `obj.lastModified` _Date_: modified time stamp - // * `obj.isDeleteMarker` _boolean_: true if it is a delete marker - // * `obj.versionId` _string_: versionId of the object - listObjects(bucketName, prefix, recursive, listOpts = {}) { - if (prefix === undefined) { - prefix = '' - } - if (recursive === undefined) { - recursive = false - } - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidPrefix(prefix)) { - throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) - } - if (!isString(prefix)) { - throw new TypeError('prefix should be of type "string"') - } - if (!isBoolean(recursive)) { - throw new TypeError('recursive should be of type "boolean"') - } - if (!isObject(listOpts)) { - throw new TypeError('listOpts should be of type "object"') - } - var marker = '' - const listQueryOpts = { - Delimiter: recursive ? '' : '/', // if recursive is false set delimiter to '/' - MaxKeys: 1000, - IncludeVersion: listOpts.IncludeVersion, - } - var objects = [] - var ended = false - var readStream = Stream.Readable({ objectMode: true }) - readStream._read = () => { - // push one object per _read() - if (objects.length) { - readStream.push(objects.shift()) - return - } - if (ended) { - return readStream.push(null) - } - // if there are no objects to push do query for the next batch of objects - this.listObjectsQuery(bucketName, prefix, marker, listQueryOpts) - .on('error', (e) => readStream.emit('error', e)) - .on('data', (result) => { - if (result.isTruncated) { - marker = result.nextMarker || result.versionIdMarker - } else { - ended = true - } - objects = result.objects - readStream._read() - }) - } - return readStream - } - - // listObjectsV2Query - (List Objects V2) - List some or all (up to 1000) of the objects in a bucket. - // - // You can use the request parameters as selection criteria to return a subset of the objects in a bucket. - // request parameters :- - // * `bucketName` _string_: name of the bucket - // * `prefix` _string_: Limits the response to keys that begin with the specified prefix. - // * `continuation-token` _string_: Used to continue iterating over a set of objects. - // * `delimiter` _string_: A delimiter is a character you use to group keys. - // * `max-keys` _number_: Sets the maximum number of keys returned in the response body. - // * `start-after` _string_: Specifies the key to start after when listing objects in a bucket. 
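`listObjectsV2` below wraps this query in an object-mode stream, re-issuing the request with `nextContinuationToken` until `isTruncated` is false. The same loop, written as an async generator for illustration (`listPage` stands in for `listObjectsV2Query`):

```ts
type Page = { objects: string[]; isTruncated: boolean; nextContinuationToken?: string }

// Drain every page by chasing continuation tokens, as the stream above does.
async function* listAll(listPage: (token: string) => Promise<Page>): AsyncGenerator<string> {
  let token = ''
  for (;;) {
    const page = await listPage(token)
    yield* page.objects
    if (!page.isTruncated || !page.nextContinuationToken) {
      return
    }
    token = page.nextContinuationToken
  }
}
```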
- listObjectsV2Query(bucketName, prefix, continuationToken, delimiter, maxKeys, startAfter) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isString(prefix)) { - throw new TypeError('prefix should be of type "string"') - } - if (!isString(continuationToken)) { - throw new TypeError('continuationToken should be of type "string"') - } - if (!isString(delimiter)) { - throw new TypeError('delimiter should be of type "string"') - } - if (!isNumber(maxKeys)) { - throw new TypeError('maxKeys should be of type "number"') - } - if (!isString(startAfter)) { - throw new TypeError('startAfter should be of type "string"') - } - var queries = [] - - // Call for listing objects v2 API - queries.push(`list-type=2`) - queries.push(`encoding-type=url`) - - // escape every value in query string, except maxKeys - queries.push(`prefix=${uriEscape(prefix)}`) - queries.push(`delimiter=${uriEscape(delimiter)}`) - - if (continuationToken) { - continuationToken = uriEscape(continuationToken) - queries.push(`continuation-token=${continuationToken}`) - } - // Set start-after - if (startAfter) { - startAfter = uriEscape(startAfter) - queries.push(`start-after=${startAfter}`) - } - // no need to escape maxKeys - if (maxKeys) { - if (maxKeys >= 1000) { - maxKeys = 1000 - } - queries.push(`max-keys=${maxKeys}`) - } - queries.sort() - var query = '' - if (queries.length > 0) { - query = `${queries.join('&')}` - } - var method = 'GET' - var transformer = transformers.getListObjectsV2Transformer() - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return transformer.emit('error', e) - } - pipesetup(response, transformer) - }) - return transformer - } - - // List the objects in the bucket using S3 ListObjects V2 - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `prefix` _string_: the prefix of the objects that should be listed (optional, default `''`) - // * `recursive` _bool_: `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. (optional, default `false`) - // * `startAfter` _string_: Specifies the key to start after when listing objects in a bucket. (optional, default `''`) - // - // __Return Value__ - // * `stream` _Stream_: stream emitting the objects in the bucket, the object is of the format: - // * `obj.name` _string_: name of the object - // * `obj.prefix` _string_: name of the object prefix - // * `obj.size` _number_: size of the object - // * `obj.etag` _string_: etag of the object - // * `obj.lastModified` _Date_: modified time stamp - listObjectsV2(bucketName, prefix, recursive, startAfter) { - if (prefix === undefined) { - prefix = '' - } - if (recursive === undefined) { - recursive = false - } - if (startAfter === undefined) { - startAfter = '' - } - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidPrefix(prefix)) { - throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) - } - if (!isString(prefix)) { - throw new TypeError('prefix should be of type "string"') - } - if (!isBoolean(recursive)) { - throw new TypeError('recursive should be of type "boolean"') - } - if (!isString(startAfter)) { - throw new TypeError('startAfter should be of type "string"') - } - // if recursive is false set delimiter to '/' - var delimiter = recursive ? 
'' : '/' - var continuationToken = '' - var objects = [] - var ended = false - var readStream = Stream.Readable({ objectMode: true }) - readStream._read = () => { - // push one object per _read() - if (objects.length) { - readStream.push(objects.shift()) - return - } - if (ended) { - return readStream.push(null) - } - // if there are no objects to push do query for the next batch of objects - this.listObjectsV2Query(bucketName, prefix, continuationToken, delimiter, 1000, startAfter) - .on('error', (e) => readStream.emit('error', e)) - .on('data', (result) => { - if (result.isTruncated) { - continuationToken = result.nextContinuationToken - } else { - ended = true - } - objects = result.objects - readStream._read() - }) - } - return readStream - } - - // Stat information of the object. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `statOpts` _object_ : Version of the object in the form `{versionId:'my-uuid'}`. Default is `{}`. (optional). - // * `callback(err, stat)` _function_: `err` is not `null` in case of error, `stat` contains the object information: - // * `stat.size` _number_: size of the object - // * `stat.etag` _string_: etag of the object - // * `stat.metaData` _string_: MetaData of the object - // * `stat.lastModified` _Date_: modified time stamp - // * `stat.versionId` _string_: version id of the object if available - statObject(bucketName, objectName, statOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - // backward compatibility - if (isFunction(statOpts)) { - cb = statOpts - statOpts = {} - } - - if (!isObject(statOpts)) { - throw new errors.InvalidArgumentError('statOpts should be of type "object"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - var query = querystring.stringify(statOpts) - var method = 'HEAD' - this.makeRequest({ method, bucketName, objectName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - // We drain the socket so that the connection gets closed. Note that this - // is not expensive as the socket will not have any data. - response.on('data', () => {}) - - const result = { - size: +response.headers['content-length'], - metaData: extractMetadata(response.headers), - lastModified: new Date(response.headers['last-modified']), - versionId: getVersionId(response.headers), - etag: sanitizeETag(response.headers.etag), - } - - cb(null, result) - }) - } - - // Remove the specified object. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `removeOpts` _object_: Version of the object in the form `{versionId:'my-uuid', governanceBypass:true|false, forceDelete:true|false}`. Default is `{}`. 
(optional) - // * `callback(err)` _function_: callback function is called with non `null` value in case of error - removeObject(bucketName, objectName, removeOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - // backward compatibility - if (isFunction(removeOpts)) { - cb = removeOpts - removeOpts = {} - } - - if (!isObject(removeOpts)) { - throw new errors.InvalidArgumentError('removeOpts should be of type "object"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - const method = 'DELETE' - const queryParams = {} - - if (removeOpts.versionId) { - queryParams.versionId = `${removeOpts.versionId}` - } - const headers = {} - if (removeOpts.governanceBypass) { - headers['X-Amz-Bypass-Governance-Retention'] = true - } - if (removeOpts.forceDelete) { - headers['x-minio-force-delete'] = true - } - - const query = querystring.stringify(queryParams) - - let requestOptions = { method, bucketName, objectName, headers } - if (query) { - requestOptions['query'] = query - } - - this.makeRequest(requestOptions, '', [200, 204], '', false, cb) - } - - // Remove all the objects residing in the objectsList. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectsList` _array_: array of objects of one of the following: - // * List of Object names as array of strings which are object keys: ['objectname1','objectname2'] - // * List of Object name and versionId as an object: [{name:"objectname",versionId:"my-version-id"}] - - removeObjects(bucketName, objectsList, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isArray(objectsList)) { - throw new errors.InvalidArgumentError('objectsList should be a list') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - const maxEntries = 1000 - const query = 'delete' - const method = 'POST' - - let result = objectsList.reduce( - (result, entry) => { - result.list.push(entry) - if (result.list.length === maxEntries) { - result.listOfList.push(result.list) - result.list = [] - } - return result - }, - { listOfList: [], list: [] }, - ) - - if (result.list.length > 0) { - result.listOfList.push(result.list) - } - - const encoder = new TextEncoder() - const batchResults = [] - - async.eachSeries( - result.listOfList, - (list, batchCb) => { - var objects = [] - list.forEach(function (value) { - if (isObject(value)) { - objects.push({ Key: value.name, VersionId: value.versionId }) - } else { - objects.push({ Key: value }) - } - }) - let deleteObjects = { Delete: { Quiet: true, Object: objects } } - const builder = new xml2js.Builder({ headless: true }) - let payload = builder.buildObject(deleteObjects) - payload = encoder.encode(payload) - const headers = {} - - headers['Content-MD5'] = toMd5(payload) - - let removeObjectsResult - this.makeRequest({ method, bucketName, query, headers }, payload, [200], '', true, (e, response) => { - if (e) { - return batchCb(e) - } - pipesetup(response, transformers.removeObjectsTransformer()) - .on('data', (data) => { - removeObjectsResult = data - }) - .on('error', (e) => { - return batchCb(e, null) - }) - .on('end', () => { - batchResults.push(removeObjectsResult) - return batchCb(null, removeObjectsResult) - }) - }) - 
}, - () => { - cb(null, _.flatten(batchResults)) - }, - ) - } - - // Get the policy on a bucket or an object prefix. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `callback(err, policy)` _function_: callback function - getBucketPolicy(bucketName, cb) { - // Validate arguments. - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - let method = 'GET' - let query = 'policy' - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let policy = Buffer.from('') - pipesetup(response, transformers.getConcater()) - .on('data', (data) => (policy = data)) - .on('error', cb) - .on('end', () => { - cb(null, policy.toString()) - }) - }) - } - - // Set the policy on a bucket or an object prefix. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `bucketPolicy` _string_: bucket policy (JSON stringify'ed) - // * `callback(err)` _function_: callback function - setBucketPolicy(bucketName, policy, cb) { - // Validate arguments. - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) - } - if (!isString(policy)) { - throw new errors.InvalidBucketPolicyError(`Invalid bucket policy: ${policy} - must be "string"`) - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - let method = 'DELETE' - let query = 'policy' - - if (policy) { - method = 'PUT' - } - - this.makeRequest({ method, bucketName, query }, policy, [204], '', false, cb) - } - - // Generate a generic presigned URL which can be - // used for HTTP methods GET, PUT, HEAD and DELETE - // - // __Arguments__ - // * `method` _string_: name of the HTTP method - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `expiry` _number_: expiry in seconds (optional, default 7 days) - // * `reqParams` _object_: request parameters (optional) e.g {versionId:"10fa9946-3f64-4137-a58f-888065c0732e"} - // * `requestDate` _Date_: A date object, the url will be issued at (optional) - presignedUrl(method, bucketName, objectName, expires, reqParams, requestDate, cb) { - if (this.anonymous) { - throw new errors.AnonymousRequestError('Presigned ' + method + ' url cannot be generated for anonymous requests') - } - if (isFunction(requestDate)) { - cb = requestDate - requestDate = new Date() - } - if (isFunction(reqParams)) { - cb = reqParams - reqParams = {} - requestDate = new Date() - } - if (isFunction(expires)) { - cb = expires - reqParams = {} - expires = 24 * 60 * 60 * 7 // 7 days in seconds - requestDate = new Date() - } - if (!isNumber(expires)) { - throw new TypeError('expires should be of type "number"') - } - if (!isObject(reqParams)) { - throw new TypeError('reqParams should be of type "object"') - } - if (!isValidDate(requestDate)) { - throw new TypeError('requestDate should be of type "Date" and valid') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - var query = querystring.stringify(reqParams) - this.getBucketRegion(bucketName, (e, region) => { - if (e) { - return cb(e) - } - // This statement is added to ensure that we send error through - // callback on presign failure. 
- var url - var reqOptions = this.getRequestOptions({ method, region, bucketName, objectName, query }) - - this.checkAndRefreshCreds() - try { - url = presignSignatureV4( - reqOptions, - this.accessKey, - this.secretKey, - this.sessionToken, - region, - requestDate, - expires, - ) - } catch (pe) { - return cb(pe) - } - cb(null, url) - }) - } - - // Generate a presigned URL for GET - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `expiry` _number_: expiry in seconds (optional, default 7 days) - // * `respHeaders` _object_: response headers to override or request params for query (optional) e.g {versionId:"10fa9946-3f64-4137-a58f-888065c0732e"} - // * `requestDate` _Date_: A date object, the url will be issued at (optional) - presignedGetObject(bucketName, objectName, expires, respHeaders, requestDate, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - - if (isFunction(respHeaders)) { - cb = respHeaders - respHeaders = {} - requestDate = new Date() - } - - var validRespHeaders = [ - 'response-content-type', - 'response-content-language', - 'response-expires', - 'response-cache-control', - 'response-content-disposition', - 'response-content-encoding', - ] - validRespHeaders.forEach((header) => { - if (respHeaders !== undefined && respHeaders[header] !== undefined && !isString(respHeaders[header])) { - throw new TypeError(`response header ${header} should be of type "string"`) - } - }) - return this.presignedUrl('GET', bucketName, objectName, expires, respHeaders, requestDate, cb) - } - - // Generate a presigned URL for PUT. Using this URL, the browser can upload to S3 only with the specified object name. - // - // __Arguments__ - // * `bucketName` _string_: name of the bucket - // * `objectName` _string_: name of the object - // * `expiry` _number_: expiry in seconds (optional, default 7 days) - presignedPutObject(bucketName, objectName, expires, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - return this.presignedUrl('PUT', bucketName, objectName, expires, cb) - } - - // return PostPolicy object - newPostPolicy() { - return new PostPolicy() - } - - // presignedPostPolicy can be used in situations where we want more control on the upload than what - // presignedPutObject() provides. i.e Using presignedPostPolicy we will be able to put policy restrictions - // on the object's `name` `bucket` `expiry` `Content-Type` `Content-Disposition` `metaData` - presignedPostPolicy(postPolicy, cb) { - if (this.anonymous) { - throw new errors.AnonymousRequestError('Presigned POST policy cannot be generated for anonymous requests') - } - if (!isObject(postPolicy)) { - throw new TypeError('postPolicy should be of type "object"') - } - if (!isFunction(cb)) { - throw new TypeError('cb should be of type "function"') - } - this.getBucketRegion(postPolicy.formData.bucket, (e, region) => { - if (e) { - return cb(e) - } - var date = new Date() - var dateStr = makeDateLong(date) - - this.checkAndRefreshCreds() - - if (!postPolicy.policy.expiration) { - // 'expiration' is mandatory field for S3. 
-        // Set default expiration date of 7 days.
-        // (setSeconds(604800) rolls the clock forward by seven days' worth of seconds.)
-        var expires = new Date()
-        expires.setSeconds(24 * 60 * 60 * 7)
-        postPolicy.setExpires(expires)
-      }
-
-      postPolicy.policy.conditions.push(['eq', '$x-amz-date', dateStr])
-      postPolicy.formData['x-amz-date'] = dateStr
-
-      postPolicy.policy.conditions.push(['eq', '$x-amz-algorithm', 'AWS4-HMAC-SHA256'])
-      postPolicy.formData['x-amz-algorithm'] = 'AWS4-HMAC-SHA256'
-
-      postPolicy.policy.conditions.push(['eq', '$x-amz-credential', this.accessKey + '/' + getScope(region, date)])
-      postPolicy.formData['x-amz-credential'] = this.accessKey + '/' + getScope(region, date)
-
-      if (this.sessionToken) {
-        postPolicy.policy.conditions.push(['eq', '$x-amz-security-token', this.sessionToken])
-        postPolicy.formData['x-amz-security-token'] = this.sessionToken
-      }
-
-      var policyBase64 = Buffer.from(JSON.stringify(postPolicy.policy)).toString('base64')
-
-      postPolicy.formData.policy = policyBase64
-
-      var signature = postPresignSignatureV4(region, date, this.secretKey, policyBase64)
-
-      postPolicy.formData['x-amz-signature'] = signature
-      var opts = {}
-      opts.region = region
-      opts.bucketName = postPolicy.formData.bucket
-      var reqOptions = this.getRequestOptions(opts)
-      var portStr = this.port === 80 || this.port === 443 ? '' : `:${this.port.toString()}`
-      var urlStr = `${reqOptions.protocol}//${reqOptions.host}${portStr}${reqOptions.path}`
-      cb(null, { postURL: urlStr, formData: postPolicy.formData })
-    })
-  }
-
-  // Calls implemented below are related to multipart.
-
-  // Initiate a new multipart upload.
-  initiateNewMultipartUpload(bucketName, objectName, metaData, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isValidObjectName(objectName)) {
-      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
-    }
-    if (!isObject(metaData)) {
-      throw new TypeError('metaData should be of type "object"')
-    }
-    var method = 'POST'
-    let headers = Object.assign({}, metaData)
-    var query = 'uploads'
-    this.makeRequest({ method, bucketName, objectName, query, headers }, '', [200], '', true, (e, response) => {
-      if (e) {
-        return cb(e)
-      }
-      var transformer = transformers.getInitiateMultipartTransformer()
-      pipesetup(response, transformer)
-        .on('error', (e) => cb(e))
-        .on('data', (uploadId) => cb(null, uploadId))
-    })
-  }
-
-  // Complete the multipart upload. After all the parts are uploaded, issuing
-  // this call aggregates the parts on the server into a single object.
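  // For illustration, the overall low-level flow looks like this (a sketch, not part
  // of the original file; endpoint, keys, bucket/object names, and `parts` are
  // placeholders, with `parts` being the [{ part, etag }] list gathered while
  // uploading each part):
  //
  //   import * as Minio from 'minio'
  //   const client = new Minio.Client({ endPoint: 'play.min.io', accessKey: '<access-key>', secretKey: '<secret-key>' })
  //   client.initiateNewMultipartUpload('my-bucket', 'my-object', {}, (err, uploadId) => {
  //     if (err) { return console.error(err) }
  //     // ...upload each part, collecting parts = [{ part, etag }, ...]...
  //     client.completeMultipartUpload('my-bucket', 'my-object', uploadId, parts, (err, result) => {
  //       if (err) { return console.error(err) }
  //       console.log(result.etag, result.versionId)
  //     })
  //   })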
- completeMultipartUpload(bucketName, objectName, uploadId, etags, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isString(uploadId)) { - throw new TypeError('uploadId should be of type "string"') - } - if (!isObject(etags)) { - throw new TypeError('etags should be of type "Array"') - } - if (!isFunction(cb)) { - throw new TypeError('cb should be of type "function"') - } - - if (!uploadId) { - throw new errors.InvalidArgumentError('uploadId cannot be empty') - } - - var method = 'POST' - var query = `uploadId=${uriEscape(uploadId)}` - - var parts = [] - - etags.forEach((element) => { - parts.push({ - Part: [ - { - PartNumber: element.part, - }, - { - ETag: element.etag, - }, - ], - }) - }) - - var payloadObject = { CompleteMultipartUpload: parts } - var payload = Xml(payloadObject) - - this.makeRequest({ method, bucketName, objectName, query }, payload, [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - var transformer = transformers.getCompleteMultipartTransformer() - pipesetup(response, transformer) - .on('error', (e) => cb(e)) - .on('data', (result) => { - if (result.errCode) { - // Multipart Complete API returns an error XML after a 200 http status - cb(new errors.S3Error(result.errMessage)) - } else { - const completeMultipartResult = { - etag: result.etag, - versionId: getVersionId(response.headers), - } - cb(null, completeMultipartResult) - } - }) - }) - } - - // Get part-info of all parts of an incomplete upload specified by uploadId. - listParts(bucketName, objectName, uploadId, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isString(uploadId)) { - throw new TypeError('uploadId should be of type "string"') - } - if (!uploadId) { - throw new errors.InvalidArgumentError('uploadId cannot be empty') - } - var parts = [] - var listNext = (marker) => { - this.listPartsQuery(bucketName, objectName, uploadId, marker, (e, result) => { - if (e) { - cb(e) - return - } - parts = parts.concat(result.parts) - if (result.isTruncated) { - listNext(result.marker) - return - } - cb(null, parts) - }) - } - listNext(0) - } - - // Called by listParts to fetch a batch of part-info - listPartsQuery(bucketName, objectName, uploadId, marker, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isString(uploadId)) { - throw new TypeError('uploadId should be of type "string"') - } - if (!isNumber(marker)) { - throw new TypeError('marker should be of type "number"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - if (!uploadId) { - throw new errors.InvalidArgumentError('uploadId cannot be empty') - } - var query = '' - if (marker && marker !== 0) { - query += `part-number-marker=${marker}&` - } - query += `uploadId=${uriEscape(uploadId)}` - - var method = 'GET' - this.makeRequest({ method, bucketName, objectName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - var transformer = 
transformers.getListPartsTransformer() - pipesetup(response, transformer) - .on('error', (e) => cb(e)) - .on('data', (data) => cb(null, data)) - }) - } - - // Called by listIncompleteUploads to fetch a batch of incomplete uploads. - listIncompleteUploadsQuery(bucketName, prefix, keyMarker, uploadIdMarker, delimiter) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isString(prefix)) { - throw new TypeError('prefix should be of type "string"') - } - if (!isString(keyMarker)) { - throw new TypeError('keyMarker should be of type "string"') - } - if (!isString(uploadIdMarker)) { - throw new TypeError('uploadIdMarker should be of type "string"') - } - if (!isString(delimiter)) { - throw new TypeError('delimiter should be of type "string"') - } - var queries = [] - queries.push(`prefix=${uriEscape(prefix)}`) - queries.push(`delimiter=${uriEscape(delimiter)}`) - - if (keyMarker) { - keyMarker = uriEscape(keyMarker) - queries.push(`key-marker=${keyMarker}`) - } - if (uploadIdMarker) { - queries.push(`upload-id-marker=${uploadIdMarker}`) - } - - var maxUploads = 1000 - queries.push(`max-uploads=${maxUploads}`) - queries.sort() - queries.unshift('uploads') - var query = '' - if (queries.length > 0) { - query = `${queries.join('&')}` - } - var method = 'GET' - var transformer = transformers.getListMultipartTransformer() - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return transformer.emit('error', e) - } - pipesetup(response, transformer) - }) - return transformer - } - - // Find uploadId of an incomplete upload. - findUploadId(bucketName, objectName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isFunction(cb)) { - throw new TypeError('cb should be of type "function"') - } - var latestUpload - var listNext = (keyMarker, uploadIdMarker) => { - this.listIncompleteUploadsQuery(bucketName, objectName, keyMarker, uploadIdMarker, '') - .on('error', (e) => cb(e)) - .on('data', (result) => { - result.uploads.forEach((upload) => { - if (upload.key === objectName) { - if (!latestUpload || upload.initiated.getTime() > latestUpload.initiated.getTime()) { - latestUpload = upload - return - } - } - }) - if (result.isTruncated) { - listNext(result.nextKeyMarker, result.nextUploadIdMarker) - return - } - if (latestUpload) { - return cb(null, latestUpload.uploadId) - } - cb(null, undefined) - }) - } - listNext('', '') - } - - // Returns a function that can be used for uploading objects. - // If multipart === true, it returns function that is used to upload - // a part of the multipart. 
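  // A sketch of how the returned uploaders are invoked internally (illustrative only;
  // this is internal API, and `readStream`, `size`, and the checksum strings are
  // placeholders):
  //   const uploader = this.getUploader(bucketName, objectName, metaData, false)
  //   uploader(readStream, size, sha256sum, md5sum, (err, { etag, versionId }) => {})
  //   const partUploader = this.getUploader(bucketName, objectName, metaData, true)
  //   partUploader(uploadId, partNumber, readStream, size, sha256sum, md5sum, cb)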
- getUploader(bucketName, objectName, metaData, multipart) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isBoolean(multipart)) { - throw new TypeError('multipart should be of type "boolean"') - } - if (!isObject(metaData)) { - throw new TypeError('metadata should be of type "object"') - } - - var validate = (stream, length, sha256sum, md5sum, cb) => { - if (!isReadableStream(stream)) { - throw new TypeError('stream should be of type "Stream"') - } - if (!isNumber(length)) { - throw new TypeError('length should be of type "number"') - } - if (!isString(sha256sum)) { - throw new TypeError('sha256sum should be of type "string"') - } - if (!isString(md5sum)) { - throw new TypeError('md5sum should be of type "string"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - } - var simpleUploader = (...args) => { - validate(...args) - var query = '' - upload(query, ...args) - } - var multipartUploader = (uploadId, partNumber, ...rest) => { - if (!isString(uploadId)) { - throw new TypeError('uploadId should be of type "string"') - } - if (!isNumber(partNumber)) { - throw new TypeError('partNumber should be of type "number"') - } - if (!uploadId) { - throw new errors.InvalidArgumentError('Empty uploadId') - } - if (!partNumber) { - throw new errors.InvalidArgumentError('partNumber cannot be 0') - } - validate(...rest) - var query = `partNumber=${partNumber}&uploadId=${uriEscape(uploadId)}` - upload(query, ...rest) - } - var upload = (query, stream, length, sha256sum, md5sum, cb) => { - var method = 'PUT' - let headers = { 'Content-Length': length } - - if (!multipart) { - headers = Object.assign({}, metaData, headers) - } - - if (!this.enableSHA256) { - headers['Content-MD5'] = md5sum - } - this.makeRequestStream( - { method, bucketName, objectName, query, headers }, - stream, - sha256sum, - [200], - '', - true, - (e, response) => { - if (e) { - return cb(e) - } - const result = { - etag: sanitizeETag(response.headers.etag), - versionId: getVersionId(response.headers), - } - // Ignore the 'data' event so that the stream closes. 
(nodejs stream requirement) - response.on('data', () => {}) - cb(null, result) - }, - ) - } - if (multipart) { - return multipartUploader - } - return simpleUploader - } - - // Remove all the notification configurations in the S3 provider - setBucketNotification(bucketName, config, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isObject(config)) { - throw new TypeError('notification config should be of type "Object"') - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - var method = 'PUT' - var query = 'notification' - var builder = new xml2js.Builder({ - rootName: 'NotificationConfiguration', - renderOpts: { pretty: false }, - headless: true, - }) - var payload = builder.buildObject(config) - this.makeRequest({ method, bucketName, query }, payload, [200], '', false, cb) - } - - removeAllBucketNotification(bucketName, cb) { - this.setBucketNotification(bucketName, new NotificationConfig(), cb) - } - - // Return the list of notification configurations stored - // in the S3 provider - getBucketNotification(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - var method = 'GET' - var query = 'notification' - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - var transformer = transformers.getBucketNotificationTransformer() - var bucketNotification - pipesetup(response, transformer) - .on('data', (result) => (bucketNotification = result)) - .on('error', (e) => cb(e)) - .on('end', () => cb(null, bucketNotification)) - }) - } - - // Listens for bucket notifications. Returns an EventEmitter. 
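  // For example (illustrative values; the returned poller emits 'notification'
  // events until stopped):
  //   const listener = client.listenBucketNotification('my-bucket', 'photos/', '.jpg', ['s3:ObjectCreated:*'])
  //   listener.on('notification', (record) => {
  //     console.log('uploaded:', record.s3.object.key)
  //     listener.stop()
  //   })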
-  listenBucketNotification(bucketName, prefix, suffix, events) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
-    }
-    if (!isString(prefix)) {
-      throw new TypeError('prefix must be of type string')
-    }
-    if (!isString(suffix)) {
-      throw new TypeError('suffix must be of type string')
-    }
-    if (!isArray(events)) {
-      throw new TypeError('events must be of type Array')
-    }
-    let listener = new NotificationPoller(this, bucketName, prefix, suffix, events)
-    listener.start()
-
-    return listener
-  }
-
-  getBucketVersioning(bucketName, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isFunction(cb)) {
-      throw new errors.InvalidArgumentError('callback should be of type "function"')
-    }
-    var method = 'GET'
-    var query = 'versioning'
-
-    this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => {
-      if (e) {
-        return cb(e)
-      }
-
-      let versionConfig = Buffer.from('')
-      pipesetup(response, transformers.bucketVersioningTransformer())
-        .on('data', (data) => {
-          versionConfig = data
-        })
-        .on('error', cb)
-        .on('end', () => {
-          cb(null, versionConfig)
-        })
-    })
-  }
-
-  setBucketVersioning(bucketName, versionConfig, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!Object.keys(versionConfig).length) {
-      throw new errors.InvalidArgumentError('versionConfig cannot be empty')
-    }
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-
-    var method = 'PUT'
-    var query = 'versioning'
-    var builder = new xml2js.Builder({
-      rootName: 'VersioningConfiguration',
-      renderOpts: { pretty: false },
-      headless: true,
-    })
-    var payload = builder.buildObject(versionConfig)
-
-    this.makeRequest({ method, bucketName, query }, payload, [200], '', false, cb)
-  }
-
-  /** To set Tags on a bucket or object based on the params
-   * __Arguments__
-   * taggingParams _object_ which contains the following properties
-   * bucketName _string_,
-   * objectName _string_ (Optional),
-   * tags _object_ of the form {'<tag-key-1>':'<tag-value-1>','<tag-key-2>':'<tag-value-2>'}
-   * putOpts _object_ (Optional) e.g {versionId:"my-object-version-id"},
-   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
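   * Example (an illustrative sketch; all values are placeholders):
   *   client.setTagging({ bucketName: 'my-bucket', tags: { project: 'alpha' }, cb: (err) => {} })
   *   client.setTagging({ bucketName: 'my-bucket', objectName: 'my-object', tags: { env: 'dev' }, putOpts: { versionId: 'my-version-id' }, cb: (err) => {} })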
-   */
-  setTagging(taggingParams) {
-    const { bucketName, objectName, tags, putOpts = {}, cb } = taggingParams
-    const method = 'PUT'
-    let query = 'tagging'
-
-    if (putOpts && putOpts.versionId) {
-      query = `${query}&versionId=${putOpts.versionId}`
-    }
-    const tagsList = []
-    for (const [key, value] of Object.entries(tags)) {
-      tagsList.push({ Key: key, Value: value })
-    }
-    const taggingConfig = {
-      Tagging: {
-        TagSet: {
-          Tag: tagsList,
-        },
-      },
-    }
-    const encoder = new TextEncoder()
-    const headers = {}
-    const builder = new xml2js.Builder({ headless: true, renderOpts: { pretty: false } })
-    let payload = builder.buildObject(taggingConfig)
-    payload = encoder.encode(payload)
-    headers['Content-MD5'] = toMd5(payload)
-    const requestOptions = { method, bucketName, query, headers }
-
-    if (objectName) {
-      requestOptions['objectName'] = objectName
-    }
-
-    this.makeRequest(requestOptions, payload, [200], '', false, cb)
-  }
-
-  /** Set Tags on a Bucket
-   * __Arguments__
-   * bucketName _string_
-   * tags _object_ of the form {'<tag-key-1>':'<tag-value-1>','<tag-key-2>':'<tag-value-2>'}
-   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
-   */
-  setBucketTagging(bucketName, tags, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isObject(tags)) {
-      throw new errors.InvalidArgumentError('tags should be of type "object"')
-    }
-    if (Object.keys(tags).length > 10) {
-      throw new errors.InvalidArgumentError('maximum tags allowed is 10')
-    }
-    if (!isFunction(cb)) {
-      throw new errors.InvalidArgumentError('callback should be of type "function"')
-    }
-
-    return this.setTagging({ bucketName, tags, cb })
-  }
-
-  /** Set Tags on an Object
-   * __Arguments__
-   * bucketName _string_
-   * objectName _string_
-   * tags _object_ of the form {'<tag-key-1>':'<tag-value-1>','<tag-key-2>':'<tag-value-2>'}
-   * putOpts _object_ (Optional) e.g {versionId:"my-object-version-id"},
-   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
-   */
-  setObjectTagging(bucketName, objectName, tags, putOpts = {}, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isValidObjectName(objectName)) {
-      throw new errors.InvalidObjectNameError('Invalid object name: ' + objectName)
-    }
-
-    if (isFunction(putOpts)) {
-      cb = putOpts
-      putOpts = {}
-    }
-
-    if (!isObject(tags)) {
-      throw new errors.InvalidArgumentError('tags should be of type "object"')
-    }
-    if (Object.keys(tags).length > 10) {
-      throw new errors.InvalidArgumentError('maximum tags allowed is 10')
-    }
-
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-    return this.setTagging({ bucketName, objectName, tags, putOpts, cb })
-  }
-
-  /** Remove Tags on a Bucket/Object based on params
-   * __Arguments__
-   * bucketName _string_
-   * objectName _string_ (optional)
-   * removeOpts _object_ (Optional) e.g {versionId:"my-object-version-id"},
-   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
-   */
-  removeTagging({ bucketName, objectName, removeOpts, cb }) {
-    const method = 'DELETE'
-    let query = 'tagging'
-
-    if (removeOpts && Object.keys(removeOpts).length && removeOpts.versionId) {
-      query = `${query}&versionId=${removeOpts.versionId}`
-    }
-    const requestOptions = { method, bucketName, query }
-
-    if (objectName) {
-      requestOptions['objectName'] = objectName
-    }
-    this.makeRequest(requestOptions, '', [200, 204], '', true, cb)
-  }
-
-  /** Remove Tags associated with a bucket
-   * __Arguments__
-   * bucketName _string_
-   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
-   */
-  removeBucketTagging(bucketName, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-    return this.removeTagging({ bucketName, cb })
-  }
-
-  /** Remove tags associated with an object
-   * __Arguments__
-   * bucketName _string_
-   * objectName _string_
-   * removeOpts _object_ (Optional) e.g. {versionId:"my-object-version-id"}
-   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
-   */
-  removeObjectTagging(bucketName, objectName, removeOpts, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isValidObjectName(objectName)) {
-      throw new errors.InvalidObjectNameError('Invalid object name: ' + objectName)
-    }
-    if (isFunction(removeOpts)) {
-      cb = removeOpts
-      removeOpts = {}
-    }
-    if (removeOpts && !isObject(removeOpts)) {
-      throw new errors.InvalidArgumentError('removeOpts should be of type "object"')
-    }
-
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-
-    return this.removeTagging({ bucketName, objectName, removeOpts, cb })
-  }
-
-  /** Get Tags associated with a Bucket
-   * __Arguments__
-   * bucketName _string_
-   * `cb(error, tags)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
-   */
-  getBucketTagging(bucketName, cb) {
-    const method = 'GET'
-    const query = 'tagging'
-    const requestOptions = { method, bucketName, query }
-
-    this.makeRequest(requestOptions, '', [200], '', true, (e, response) => {
-      var transformer = transformers.getTagsTransformer()
-      if (e) {
-        return cb(e)
-      }
-      let tagsList
-      pipesetup(response, transformer)
-        .on('data', (result) => (tagsList = result))
-        .on('error', (e) => cb(e))
-        .on('end', () => cb(null, tagsList))
-    })
-  }
-
-  /** Get the tags associated with a bucket OR an object
-   * bucketName _string_
-   * objectName _string_ (Optional)
-   * getOpts _object_ (Optional) e.g {versionId:"my-object-version-id"}
-   * `cb(error, tags)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
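   * Example (illustrative; values are placeholders):
   *   client.getObjectTagging('my-bucket', 'my-object', { versionId: 'my-version-id' }, (err, tags) => {})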
-   */
-  getObjectTagging(bucketName, objectName, getOpts = {}, cb = () => false) {
-    const method = 'GET'
-    let query = 'tagging'
-
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (!isValidObjectName(objectName)) {
-      throw new errors.InvalidObjectNameError('Invalid object name: ' + objectName)
-    }
-    if (isFunction(getOpts)) {
-      cb = getOpts
-      getOpts = {}
-    }
-    if (!isObject(getOpts)) {
-      throw new errors.InvalidArgumentError('getOpts should be of type "object"')
-    }
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-
-    if (getOpts && getOpts.versionId) {
-      query = `${query}&versionId=${getOpts.versionId}`
-    }
-    const requestOptions = { method, bucketName, query }
-    if (objectName) {
-      requestOptions['objectName'] = objectName
-    }
-
-    this.makeRequest(requestOptions, '', [200], '', true, (e, response) => {
-      const transformer = transformers.getTagsTransformer()
-      if (e) {
-        return cb(e)
-      }
-      let tagsList
-      pipesetup(response, transformer)
-        .on('data', (result) => (tagsList = result))
-        .on('error', (e) => cb(e))
-        .on('end', () => cb(null, tagsList))
-    })
-  }
-
-  /**
-   * Apply lifecycle configuration on a bucket.
-   * bucketName _string_
-   * policyConfig _object_ a valid policy configuration object.
-   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
-   */
-  applyBucketLifecycle(bucketName, policyConfig, cb) {
-    const method = 'PUT'
-    const query = 'lifecycle'
-
-    const encoder = new TextEncoder()
-    const headers = {}
-    const builder = new xml2js.Builder({
-      rootName: 'LifecycleConfiguration',
-      headless: true,
-      renderOpts: { pretty: false },
-    })
-    let payload = builder.buildObject(policyConfig)
-    payload = encoder.encode(payload)
-    const requestOptions = { method, bucketName, query, headers }
-    headers['Content-MD5'] = toMd5(payload)
-
-    this.makeRequest(requestOptions, payload, [200], '', false, cb)
-  }
-
-  /** Remove lifecycle configuration of a bucket.
-   * bucketName _string_
-   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
-   */
-  removeBucketLifecycle(bucketName, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    const method = 'DELETE'
-    const query = 'lifecycle'
-    this.makeRequest({ method, bucketName, query }, '', [204], '', false, cb)
-  }
-
-  /** Set/Override lifecycle configuration on a bucket. If the configuration is empty, it removes the configuration.
-   * bucketName _string_
-   * lifeCycleConfig _object_ either null or '' (to remove the lifecycle configuration) or a valid lifecycle configuration
-   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
-   */
-  setBucketLifecycle(bucketName, lifeCycleConfig = null, cb) {
-    if (!isValidBucketName(bucketName)) {
-      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
-    }
-    if (_.isEmpty(lifeCycleConfig)) {
-      this.removeBucketLifecycle(bucketName, cb)
-    } else {
-      this.applyBucketLifecycle(bucketName, lifeCycleConfig, cb)
-    }
-  }
-
-  /** Get lifecycle configuration on a bucket.
-   * bucketName _string_
-   * `cb(error, config)` _function_ - callback function; receives the lifecycle configuration as the second argument.
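   * Example (illustrative):
   *   client.getBucketLifecycle('my-bucket', (err, lifecycleConfig) => {})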
- */ - getBucketLifecycle(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - const method = 'GET' - const query = 'lifecycle' - const requestOptions = { method, bucketName, query } - - this.makeRequest(requestOptions, '', [200], '', true, (e, response) => { - const transformer = transformers.lifecycleTransformer() - if (e) { - return cb(e) - } - let lifecycleConfig - pipesetup(response, transformer) - .on('data', (result) => (lifecycleConfig = result)) - .on('error', (e) => cb(e)) - .on('end', () => cb(null, lifecycleConfig)) - }) - } - - setObjectLockConfig(bucketName, lockConfigOpts = {}, cb) { - const retentionModes = [RETENTION_MODES.COMPLIANCE, RETENTION_MODES.GOVERNANCE] - const validUnits = [RETENTION_VALIDITY_UNITS.DAYS, RETENTION_VALIDITY_UNITS.YEARS] - - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - - if (lockConfigOpts.mode && !retentionModes.includes(lockConfigOpts.mode)) { - throw new TypeError(`lockConfigOpts.mode should be one of ${retentionModes}`) - } - if (lockConfigOpts.unit && !validUnits.includes(lockConfigOpts.unit)) { - throw new TypeError(`lockConfigOpts.unit should be one of ${validUnits}`) - } - if (lockConfigOpts.validity && !isNumber(lockConfigOpts.validity)) { - throw new TypeError(`lockConfigOpts.validity should be a number`) - } - - const method = 'PUT' - const query = 'object-lock' - - let config = { - ObjectLockEnabled: 'Enabled', - } - const configKeys = Object.keys(lockConfigOpts) - // Check if keys are present and all keys are present. - if (configKeys.length > 0) { - if (_.difference(configKeys, ['unit', 'mode', 'validity']).length !== 0) { - throw new TypeError( - `lockConfigOpts.mode,lockConfigOpts.unit,lockConfigOpts.validity all the properties should be specified.`, - ) - } else { - config.Rule = { - DefaultRetention: {}, - } - if (lockConfigOpts.mode) { - config.Rule.DefaultRetention.Mode = lockConfigOpts.mode - } - if (lockConfigOpts.unit === RETENTION_VALIDITY_UNITS.DAYS) { - config.Rule.DefaultRetention.Days = lockConfigOpts.validity - } else if (lockConfigOpts.unit === RETENTION_VALIDITY_UNITS.YEARS) { - config.Rule.DefaultRetention.Years = lockConfigOpts.validity - } - } - } - - const builder = new xml2js.Builder({ - rootName: 'ObjectLockConfiguration', - renderOpts: { pretty: false }, - headless: true, - }) - const payload = builder.buildObject(config) - - const headers = {} - headers['Content-MD5'] = toMd5(payload) - - this.makeRequest({ method, bucketName, query, headers }, payload, [200], '', false, cb) - } - - getObjectLockConfig(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - const method = 'GET' - const query = 'object-lock' - - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let objectLockConfig = Buffer.from('') - pipesetup(response, transformers.objectLockTransformer()) - .on('data', (data) => { - objectLockConfig = data - }) - .on('error', cb) - .on('end', () => { - cb(null, objectLockConfig) - }) - }) - } - - putObjectRetention(bucketName, objectName, retentionOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + 
bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isObject(retentionOpts)) { - throw new errors.InvalidArgumentError('retentionOpts should be of type "object"') - } else { - if (retentionOpts.governanceBypass && !isBoolean(retentionOpts.governanceBypass)) { - throw new errors.InvalidArgumentError('Invalid value for governanceBypass', retentionOpts.governanceBypass) - } - if ( - retentionOpts.mode && - ![RETENTION_MODES.COMPLIANCE, RETENTION_MODES.GOVERNANCE].includes(retentionOpts.mode) - ) { - throw new errors.InvalidArgumentError('Invalid object retention mode ', retentionOpts.mode) - } - if (retentionOpts.retainUntilDate && !isString(retentionOpts.retainUntilDate)) { - throw new errors.InvalidArgumentError('Invalid value for retainUntilDate', retentionOpts.retainUntilDate) - } - if (retentionOpts.versionId && !isString(retentionOpts.versionId)) { - throw new errors.InvalidArgumentError('Invalid value for versionId', retentionOpts.versionId) - } - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - const method = 'PUT' - let query = 'retention' - - const headers = {} - if (retentionOpts.governanceBypass) { - headers['X-Amz-Bypass-Governance-Retention'] = true - } - - const builder = new xml2js.Builder({ rootName: 'Retention', renderOpts: { pretty: false }, headless: true }) - const params = {} - - if (retentionOpts.mode) { - params.Mode = retentionOpts.mode - } - if (retentionOpts.retainUntilDate) { - params.RetainUntilDate = retentionOpts.retainUntilDate - } - if (retentionOpts.versionId) { - query += `&versionId=${retentionOpts.versionId}` - } - - let payload = builder.buildObject(params) - - headers['Content-MD5'] = toMd5(payload) - this.makeRequest({ method, bucketName, objectName, query, headers }, payload, [200, 204], '', false, cb) - } - - getObjectRetention(bucketName, objectName, getOpts, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!isObject(getOpts)) { - throw new errors.InvalidArgumentError('callback should be of type "object"') - } else if (getOpts.versionId && !isString(getOpts.versionId)) { - throw new errors.InvalidArgumentError('VersionID should be of type "string"') - } - if (cb && !isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - const method = 'GET' - let query = 'retention' - if (getOpts.versionId) { - query += `&versionId=${getOpts.versionId}` - } - - this.makeRequest({ method, bucketName, objectName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let retentionConfig = Buffer.from('') - pipesetup(response, transformers.objectRetentionTransformer()) - .on('data', (data) => { - retentionConfig = data - }) - .on('error', cb) - .on('end', () => { - cb(null, retentionConfig) - }) - }) - } - - setBucketEncryption(bucketName, encryptionConfig, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - - if (isFunction(encryptionConfig)) { - cb = encryptionConfig - encryptionConfig = null - } - - if (!_.isEmpty(encryptionConfig) && encryptionConfig.Rule.length > 1) { - throw new errors.InvalidArgumentError('Invalid Rule length. 
Only one rule is allowed.: ' + encryptionConfig.Rule) - } - if (cb && !isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - let encryptionObj = encryptionConfig - if (_.isEmpty(encryptionConfig)) { - encryptionObj = { - // Default MinIO Server Supported Rule - Rule: [ - { - ApplyServerSideEncryptionByDefault: { - SSEAlgorithm: 'AES256', - }, - }, - ], - } - } - - let method = 'PUT' - let query = 'encryption' - let builder = new xml2js.Builder({ - rootName: 'ServerSideEncryptionConfiguration', - renderOpts: { pretty: false }, - headless: true, - }) - let payload = builder.buildObject(encryptionObj) - - const headers = {} - headers['Content-MD5'] = toMd5(payload) - - this.makeRequest({ method, bucketName, query, headers }, payload, [200], '', false, cb) - } - - getBucketEncryption(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - const method = 'GET' - const query = 'encryption' - - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let bucketEncConfig = Buffer.from('') - pipesetup(response, transformers.bucketEncryptionTransformer()) - .on('data', (data) => { - bucketEncConfig = data - }) - .on('error', cb) - .on('end', () => { - cb(null, bucketEncConfig) - }) - }) - } - removeBucketEncryption(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - const method = 'DELETE' - const query = 'encryption' - - this.makeRequest({ method, bucketName, query }, '', [204], '', false, cb) - } - - setBucketReplication(bucketName, replicationConfig = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isObject(replicationConfig)) { - throw new errors.InvalidArgumentError('replicationConfig should be of type "object"') - } else { - if (_.isEmpty(replicationConfig.role)) { - throw new errors.InvalidArgumentError('Role cannot be empty') - } else if (replicationConfig.role && !isString(replicationConfig.role)) { - throw new errors.InvalidArgumentError('Invalid value for role', replicationConfig.role) - } - if (_.isEmpty(replicationConfig.rules)) { - throw new errors.InvalidArgumentError('Minimum one replication rule must be specified') - } - } - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - const method = 'PUT' - let query = 'replication' - const headers = {} - - const replicationParamsConfig = { - ReplicationConfiguration: { - Role: replicationConfig.role, - Rule: replicationConfig.rules, - }, - } - - const builder = new xml2js.Builder({ renderOpts: { pretty: false }, headless: true }) - - let payload = builder.buildObject(replicationParamsConfig) - - headers['Content-MD5'] = toMd5(payload) - - this.makeRequest({ method, bucketName, query, headers }, payload, [200], '', false, cb) - } - - getBucketReplication(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - const method = 
'GET' - const query = 'replication' - - this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let replicationConfig = Buffer.from('') - pipesetup(response, transformers.replicationConfigTransformer()) - .on('data', (data) => { - replicationConfig = data - }) - .on('error', cb) - .on('end', () => { - cb(null, replicationConfig) - }) - }) - } - - removeBucketReplication(bucketName, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - const method = 'DELETE' - const query = 'replication' - this.makeRequest({ method, bucketName, query }, '', [200, 204], '', false, cb) - } - - getObjectLegalHold(bucketName, objectName, getOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - - if (isFunction(getOpts)) { - cb = getOpts - getOpts = {} - } - - if (!isObject(getOpts)) { - throw new TypeError('getOpts should be of type "Object"') - } else if (Object.keys(getOpts).length > 0 && getOpts.versionId && !isString(getOpts.versionId)) { - throw new TypeError('versionId should be of type string.:', getOpts.versionId) - } - - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - - const method = 'GET' - let query = 'legal-hold' - - if (getOpts.versionId) { - query += `&versionId=${getOpts.versionId}` - } - - this.makeRequest({ method, bucketName, objectName, query }, '', [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let legalHoldConfig = Buffer.from('') - pipesetup(response, transformers.objectLegalHoldTransformer()) - .on('data', (data) => { - legalHoldConfig = data - }) - .on('error', cb) - .on('end', () => { - cb(null, legalHoldConfig) - }) - }) - } - - setObjectLegalHold(bucketName, objectName, setOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - - const defaultOpts = { - status: LEGAL_HOLD_STATUS.ENABLED, - } - if (isFunction(setOpts)) { - cb = setOpts - setOpts = defaultOpts - } - - if (!isObject(setOpts)) { - throw new TypeError('setOpts should be of type "Object"') - } else { - if (![LEGAL_HOLD_STATUS.ENABLED, LEGAL_HOLD_STATUS.DISABLED].includes(setOpts.status)) { - throw new TypeError('Invalid status: ' + setOpts.status) - } - if (setOpts.versionId && !setOpts.versionId.length) { - throw new TypeError('versionId should be of type string.:' + setOpts.versionId) - } - } - - if (!isFunction(cb)) { - throw new errors.InvalidArgumentError('callback should be of type "function"') - } - - if (_.isEmpty(setOpts)) { - setOpts = { - defaultOpts, - } - } - - const method = 'PUT' - let query = 'legal-hold' - - if (setOpts.versionId) { - query += `&versionId=${setOpts.versionId}` - } - - let config = { - Status: setOpts.status, - } - - const builder = new xml2js.Builder({ rootName: 'LegalHold', renderOpts: { pretty: false }, headless: true }) - const payload = builder.buildObject(config) - const headers = {} - headers['Content-MD5'] = toMd5(payload) - - this.makeRequest({ method, bucketName, objectName, query, headers }, payload, [200], '', false, cb) - } - async 
setCredentialsProvider(credentialsProvider) {
-    if (!(credentialsProvider instanceof CredentialProvider)) {
-      throw new Error('Unable to get credentials. Expected instance of CredentialProvider')
-    }
-    this.credentialsProvider = credentialsProvider
-    await this.checkAndRefreshCreds()
-  }
-
-  async checkAndRefreshCreds() {
-    if (this.credentialsProvider) {
-      return await this.fetchCredentials()
-    }
-  }
-
-  async fetchCredentials() {
-    if (this.credentialsProvider) {
-      const credentialsConf = await this.credentialsProvider.getCredentials()
-      if (credentialsConf) {
-        this.accessKey = credentialsConf.getAccessKey()
-        this.secretKey = credentialsConf.getSecretKey()
-        this.sessionToken = credentialsConf.getSessionToken()
-      } else {
-        throw new Error('Unable to get credentials: the credentials provider returned no credentials')
-      }
-    } else {
-      throw new Error('Unable to get credentials: no credentials provider has been set')
-    }
-  }
-
-  /**
-   * Internal method to abort a multipart upload request in case of any errors.
-   * @param bucketName __string__ Bucket Name
-   * @param objectName __string__ Object Name
-   * @param uploadId __string__ id of a multipart upload to cancel during compose object sequence.
-   * @param cb __function__ callback function
-   */
-  abortMultipartUpload(bucketName, objectName, uploadId, cb) {
-    const method = 'DELETE'
-    let query = `uploadId=${uploadId}`
-
-    const requestOptions = { method, bucketName, objectName, query }
-    this.makeRequest(requestOptions, '', [204], '', false, cb)
-  }
-
-  /**
-   * Internal method to upload a part during compose object.
-   * @param partConfig __object__ contains the following.
-   *  bucketName __string__
-   *  objectName __string__
-   *  uploadID __string__
-   *  partNumber __number__
-   *  headers __object__
-   * @param cb __function__ called with an error as the first argument on failure, null on success.
-   */
-  uploadPartCopy(partConfig, cb) {
-    const { bucketName, objectName, uploadID, partNumber, headers } = partConfig
-
-    const method = 'PUT'
-    let query = `uploadId=${uploadID}&partNumber=${partNumber}`
-    const requestOptions = { method, bucketName, objectName, query, headers }
-    return this.makeRequest(requestOptions, '', [200], '', true, (e, response) => {
-      let partCopyResult = Buffer.from('')
-      if (e) {
-        return cb(e)
-      }
-      pipesetup(response, transformers.uploadPartTransformer())
-        .on('data', (data) => {
-          partCopyResult = data
-        })
-        .on('error', cb)
-        .on('end', () => {
-          let uploadPartCopyRes = {
-            etag: sanitizeETag(partCopyResult.ETag),
-            key: objectName,
-            part: partNumber,
-          }
-
-          cb(null, uploadPartCopyRes)
-        })
-    })
-  }
-
-  composeObject(destObjConfig = {}, sourceObjList = [], cb) {
-    const me = this // many async flows, so store a reference to `this`
-    if (!isArray(sourceObjList)) {
-      throw new errors.InvalidArgumentError('sourceObjList should be an array of CopySourceOptions')
-    }
-    if (!(destObjConfig instanceof CopyDestinationOptions)) {
-      throw new errors.InvalidArgumentError('destObjConfig should be of type CopyDestinationOptions')
-    }
-
-    const sourceFilesLength = sourceObjList.length
-
-    if (sourceFilesLength < 1 || sourceFilesLength > PART_CONSTRAINTS.MAX_PARTS_COUNT) {
-      throw new errors.InvalidArgumentError(
-        `There must be at least one and up to ${PART_CONSTRAINTS.MAX_PARTS_COUNT} source objects.`,
-      )
-    }
-
-    if (!isFunction(cb)) {
-      throw new TypeError('callback should be of type "function"')
-    }
-
-    for (let i = 0; i < sourceFilesLength; i++) {
-      if (!sourceObjList[i].validate()) {
-        return false
-      }
-    }
-
-    if (!destObjConfig.validate()) {
-      return false
-    }
-
-    const getStatOptions = (srcConfig) => {
-      let statOpts = {}
-      if (!_.isEmpty(srcConfig.VersionID)) {
-        statOpts = {
-          versionId: srcConfig.VersionID,
-        }
-      }
-      return statOpts
-    }
-    const srcObjectSizes = []
-    let totalSize = 0
-    let totalParts = 0
-
-    const sourceObjStats = sourceObjList.map((srcItem) =>
-      me.statObject(srcItem.Bucket, srcItem.Object, getStatOptions(srcItem)),
-    )
-
-    return Promise.all(sourceObjStats)
-      .then((srcObjectInfos) => {
-        const validatedStats = srcObjectInfos.map((resItemStat, index) => {
-          const srcConfig = sourceObjList[index]
-
-          let srcCopySize = resItemStat.size
-          // Check if a segment is specified, and if so, is the
-          // segment within object bounds?
-          if (srcConfig.MatchRange) {
-            // Since a range is specified, 0 <= src.srcStart <= src.srcEnd must hold,
-            // so the only invalid cases to check are:
-            const srcStart = srcConfig.Start
-            const srcEnd = srcConfig.End
-            if (srcEnd >= srcCopySize || srcStart < 0) {
-              throw new errors.InvalidArgumentError(
-                `CopySrcOptions ${index} has invalid segment-to-copy [${srcStart}, ${srcEnd}] (size is ${srcCopySize})`,
-              )
-            }
-            srcCopySize = srcEnd - srcStart + 1
-          }
-
-          // Only the last source may be less than `absMinPartSize`
-          if (srcCopySize < PART_CONSTRAINTS.ABS_MIN_PART_SIZE && index < sourceFilesLength - 1) {
-            throw new errors.InvalidArgumentError(
-              `CopySrcOptions ${index} is too small (${srcCopySize}) and it is not the last part.`,
-            )
-          }
-
-          // Is the data to copy too large?
-          totalSize += srcCopySize
-          if (totalSize > PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE) {
-            throw new errors.InvalidArgumentError(`Cannot compose an object of size ${totalSize} (> 5TiB)`)
-          }
-
-          // record source size
-          srcObjectSizes[index] = srcCopySize
-
-          // calculate parts needed for current source
-          totalParts += partsRequired(srcCopySize)
-          // Do we need more parts than we are allowed?
-          if (totalParts > PART_CONSTRAINTS.MAX_PARTS_COUNT) {
-            throw new errors.InvalidArgumentError(
-              `Your proposed compose object requires more than ${PART_CONSTRAINTS.MAX_PARTS_COUNT} parts`,
-            )
-          }
-
-          return resItemStat
-        })
-
-        if ((totalParts === 1 && totalSize <= PART_CONSTRAINTS.MAX_PART_SIZE) || totalSize === 0) {
-          return this.copyObject(sourceObjList[0], destObjConfig, cb) // use copyObjectV2
-        }
-
-        // preserve etag to avoid modification of object while copying.
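        // (Worked note on the split arithmetic used below: the copy ranges are
        // inclusive on both ends, so a part spanning [splitStart, splitEnd] copies
        // splitEnd - splitStart + 1 bytes; e.g. 'bytes=0-5242879' copies exactly
        // 5242879 - 0 + 1 = 5242880 bytes, i.e. 5 MiB.)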
- for (let i = 0; i < sourceFilesLength; i++) { - sourceObjList[i].MatchETag = validatedStats[i].etag - } - - const splitPartSizeList = validatedStats.map((resItemStat, idx) => { - const calSize = calculateEvenSplits(srcObjectSizes[idx], sourceObjList[idx]) - return calSize - }) - - function getUploadPartConfigList(uploadId) { - const uploadPartConfigList = [] - - splitPartSizeList.forEach((splitSize, splitIndex) => { - const { startIndex: startIdx, endIndex: endIdx, objInfo: objConfig } = splitSize - - let partIndex = splitIndex + 1 // part index starts from 1. - const totalUploads = Array.from(startIdx) - - const headers = sourceObjList[splitIndex].getHeaders() - - totalUploads.forEach((splitStart, upldCtrIdx) => { - let splitEnd = endIdx[upldCtrIdx] - - const sourceObj = `${objConfig.Bucket}/${objConfig.Object}` - headers['x-amz-copy-source'] = `${sourceObj}` - headers['x-amz-copy-source-range'] = `bytes=${splitStart}-${splitEnd}` - - const uploadPartConfig = { - bucketName: destObjConfig.Bucket, - objectName: destObjConfig.Object, - uploadID: uploadId, - partNumber: partIndex, - headers: headers, - sourceObj: sourceObj, - } - - uploadPartConfigList.push(uploadPartConfig) - }) - }) - - return uploadPartConfigList - } - - const performUploadParts = (uploadId) => { - const uploadList = getUploadPartConfigList(uploadId) - - async.map(uploadList, me.uploadPartCopy.bind(me), (err, res) => { - if (err) { - return this.abortMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId, cb) - } - const partsDone = res.map((partCopy) => ({ etag: partCopy.etag, part: partCopy.part })) - return me.completeMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId, partsDone, cb) - }) - } - - const newUploadHeaders = destObjConfig.getHeaders() - - me.initiateNewMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, newUploadHeaders, (err, uploadId) => { - if (err) { - return cb(err, null) - } - performUploadParts(uploadId) - }) - }) - .catch((error) => { - cb(error, null) - }) - } - selectObjectContent(bucketName, objectName, selectOpts = {}, cb) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) - } - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) - } - if (!_.isEmpty(selectOpts)) { - if (!isString(selectOpts.expression)) { - throw new TypeError('sqlExpression should be of type "string"') - } - if (!_.isEmpty(selectOpts.inputSerialization)) { - if (!isObject(selectOpts.inputSerialization)) { - throw new TypeError('inputSerialization should be of type "object"') - } - } else { - throw new TypeError('inputSerialization is required') - } - if (!_.isEmpty(selectOpts.outputSerialization)) { - if (!isObject(selectOpts.outputSerialization)) { - throw new TypeError('outputSerialization should be of type "object"') - } - } else { - throw new TypeError('outputSerialization is required') - } - } else { - throw new TypeError('valid select configuration is required') - } - - if (!isFunction(cb)) { - throw new TypeError('callback should be of type "function"') - } - - const method = 'POST' - let query = `select` - query += '&select-type=2' - - const config = [ - { - Expression: selectOpts.expression, - }, - { - ExpressionType: selectOpts.expressionType || 'SQL', - }, - { - InputSerialization: [selectOpts.inputSerialization], - }, - { - OutputSerialization: [selectOpts.outputSerialization], - }, - ] - - // Optional - if (selectOpts.requestProgress) { 
- config.push({ RequestProgress: selectOpts.requestProgress }) - } - // Optional - if (selectOpts.scanRange) { - config.push({ ScanRange: selectOpts.scanRange }) - } - - const builder = new xml2js.Builder({ - rootName: 'SelectObjectContentRequest', - renderOpts: { pretty: false }, - headless: true, - }) - const payload = builder.buildObject(config) - - this.makeRequest({ method, bucketName, objectName, query }, payload, [200], '', true, (e, response) => { - if (e) { - return cb(e) - } - - let selectResult - pipesetup(response, transformers.selectObjectContentTransformer()) - .on('data', (data) => { - selectResult = parseSelectObjectContentResponse(data) - }) - .on('error', cb) - .on('end', () => { - cb(null, selectResult) - }) - }) - } - - get extensions() { - if (!this.clientExtensions) { - this.clientExtensions = new extensions(this) - } - return this.clientExtensions - } -} - -// Promisify various public-facing APIs on the Client module. -Client.prototype.makeBucket = promisify(Client.prototype.makeBucket) -Client.prototype.listBuckets = promisify(Client.prototype.listBuckets) -Client.prototype.bucketExists = promisify(Client.prototype.bucketExists) -Client.prototype.removeBucket = promisify(Client.prototype.removeBucket) - -Client.prototype.getObject = promisify(Client.prototype.getObject) -Client.prototype.getPartialObject = promisify(Client.prototype.getPartialObject) -Client.prototype.fGetObject = promisify(Client.prototype.fGetObject) -Client.prototype.putObject = promisify(Client.prototype.putObject) -Client.prototype.fPutObject = promisify(Client.prototype.fPutObject) -Client.prototype.copyObject = promisify(Client.prototype.copyObject) -Client.prototype.statObject = promisify(Client.prototype.statObject) -Client.prototype.removeObject = promisify(Client.prototype.removeObject) -Client.prototype.removeObjects = promisify(Client.prototype.removeObjects) - -Client.prototype.presignedUrl = promisify(Client.prototype.presignedUrl) -Client.prototype.presignedGetObject = promisify(Client.prototype.presignedGetObject) -Client.prototype.presignedPutObject = promisify(Client.prototype.presignedPutObject) -Client.prototype.presignedPostPolicy = promisify(Client.prototype.presignedPostPolicy) -Client.prototype.getBucketNotification = promisify(Client.prototype.getBucketNotification) -Client.prototype.setBucketNotification = promisify(Client.prototype.setBucketNotification) -Client.prototype.removeAllBucketNotification = promisify(Client.prototype.removeAllBucketNotification) -Client.prototype.getBucketPolicy = promisify(Client.prototype.getBucketPolicy) -Client.prototype.setBucketPolicy = promisify(Client.prototype.setBucketPolicy) -Client.prototype.removeIncompleteUpload = promisify(Client.prototype.removeIncompleteUpload) -Client.prototype.getBucketVersioning = promisify(Client.prototype.getBucketVersioning) -Client.prototype.setBucketVersioning = promisify(Client.prototype.setBucketVersioning) -Client.prototype.setBucketTagging = promisify(Client.prototype.setBucketTagging) -Client.prototype.removeBucketTagging = promisify(Client.prototype.removeBucketTagging) -Client.prototype.getBucketTagging = promisify(Client.prototype.getBucketTagging) -Client.prototype.setObjectTagging = promisify(Client.prototype.setObjectTagging) -Client.prototype.removeObjectTagging = promisify(Client.prototype.removeObjectTagging) -Client.prototype.getObjectTagging = promisify(Client.prototype.getObjectTagging) -Client.prototype.setBucketLifecycle = promisify(Client.prototype.setBucketLifecycle) 
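// (Once promisified as above, each of these APIs can also be awaited; a hypothetical
// sketch, assuming a configured `client` instance:)
//   const tags = await client.getBucketTagging('my-bucket')
//   await client.setBucketVersioning('my-bucket', { Status: 'Enabled' })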
-Client.prototype.getBucketLifecycle = promisify(Client.prototype.getBucketLifecycle) -Client.prototype.removeBucketLifecycle = promisify(Client.prototype.removeBucketLifecycle) -Client.prototype.setObjectLockConfig = promisify(Client.prototype.setObjectLockConfig) -Client.prototype.getObjectLockConfig = promisify(Client.prototype.getObjectLockConfig) -Client.prototype.putObjectRetention = promisify(Client.prototype.putObjectRetention) -Client.prototype.getObjectRetention = promisify(Client.prototype.getObjectRetention) -Client.prototype.setBucketEncryption = promisify(Client.prototype.setBucketEncryption) -Client.prototype.getBucketEncryption = promisify(Client.prototype.getBucketEncryption) -Client.prototype.removeBucketEncryption = promisify(Client.prototype.removeBucketEncryption) -Client.prototype.setBucketReplication = promisify(Client.prototype.setBucketReplication) -Client.prototype.getBucketReplication = promisify(Client.prototype.getBucketReplication) -Client.prototype.removeBucketReplication = promisify(Client.prototype.removeBucketReplication) -Client.prototype.setObjectLegalHold = promisify(Client.prototype.setObjectLegalHold) -Client.prototype.getObjectLegalHold = promisify(Client.prototype.getObjectLegalHold) -Client.prototype.composeObject = promisify(Client.prototype.composeObject) -Client.prototype.selectObjectContent = promisify(Client.prototype.selectObjectContent) - -export class CopyConditions { - constructor() { - this.modified = '' - this.unmodified = '' - this.matchETag = '' - this.matchETagExcept = '' - } - - setModified(date) { - if (!(date instanceof Date)) { - throw new TypeError('date must be of type Date') - } - - this.modified = date.toUTCString() - } - - setUnmodified(date) { - if (!(date instanceof Date)) { - throw new TypeError('date must be of type Date') - } - - this.unmodified = date.toUTCString() - } - - setMatchETag(etag) { - this.matchETag = etag - } - - setMatchETagExcept(etag) { - this.matchETagExcept = etag - } -} - -// Build PostPolicy object that can be signed by presignedPostPolicy -export class PostPolicy { - constructor() { - this.policy = { - conditions: [], - } - this.formData = {} - } - - // set expiration date - setExpires(date) { - if (!date) { - throw new errors.InvalidDateError('Invalid date : cannot be null') - } - this.policy.expiration = date.toISOString() - } - - // set object name - setKey(objectName) { - if (!isValidObjectName(objectName)) { - throw new errors.InvalidObjectNameError(`Invalid object name : ${objectName}`) - } - this.policy.conditions.push(['eq', '$key', objectName]) - this.formData.key = objectName - } - - // set object name prefix, i.e policy allows any keys with this prefix - setKeyStartsWith(prefix) { - if (!isValidPrefix(prefix)) { - throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) - } - this.policy.conditions.push(['starts-with', '$key', prefix]) - this.formData.key = prefix - } - - // set bucket name - setBucket(bucketName) { - if (!isValidBucketName(bucketName)) { - throw new errors.InvalidBucketNameError(`Invalid bucket name : ${bucketName}`) - } - this.policy.conditions.push(['eq', '$bucket', bucketName]) - this.formData.bucket = bucketName - } - - // set Content-Type - setContentType(type) { - if (!type) { - throw new Error('content-type cannot be null') - } - this.policy.conditions.push(['eq', '$Content-Type', type]) - this.formData['Content-Type'] = type - } - - // set Content-Type prefix, i.e image/ allows any image - setContentTypeStartsWith(prefix) { - if (!prefix) { - throw new 
Error('content-type cannot be null') - } - this.policy.conditions.push(['starts-with', '$Content-Type', prefix]) - this.formData['Content-Type'] = prefix - } - - // set Content-Disposition - setContentDisposition(value) { - if (!value) { - throw new Error('content-disposition cannot be null') - } - this.policy.conditions.push(['eq', '$Content-Disposition', value]) - this.formData['Content-Disposition'] = value - } - - // set minimum/maximum length of what Content-Length can be. - setContentLengthRange(min, max) { - if (min > max) { - throw new Error('min cannot be more than max') - } - if (min < 0) { - throw new Error('min should be > 0') - } - if (max < 0) { - throw new Error('max should be > 0') - } - this.policy.conditions.push(['content-length-range', min, max]) - } - - // set user defined metadata - setUserMetaData(metaData) { - if (!isObject(metaData)) { - throw new TypeError('metadata should be of type "object"') - } - Object.entries(metaData).forEach(([key, value]) => { - const amzMetaDataKey = `x-amz-meta-${key}` - this.policy.conditions.push(['eq', `$${amzMetaDataKey}`, value]) - this.formData[amzMetaDataKey] = value - }) - } -} diff --git a/src/minio.ts b/src/minio.ts new file mode 100644 index 00000000..9ccad980 --- /dev/null +++ b/src/minio.ts @@ -0,0 +1,41 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { TypedClient2 } from './typed-client2.ts' + +export { AssumeRoleProvider } from './AssumeRoleProvider.ts' +export { CopyConditions } from './copyConditions.ts' +export { CredentialProvider } from './CredentialProvider.ts' +export { Credentials } from './Credentials.ts' +export { + CopyDestinationOptions, + CopySourceOptions, + DEFAULT_REGION, + ENCRYPTION_TYPES, + PART_CONSTRAINTS, +} from './helpers.ts' +export type { NotificationEvent, NotificationRecord } from './notification.ts' +export { + buildARN, + CloudFunctionConfig, + NotificationConfig, + NotificationPoller, + QueueConfig, + TopicConfig, +} from './notification.ts' +export { PostPolicy } from './postPolicy.ts' + +export class Client extends TypedClient2 {} diff --git a/src/notification.js b/src/notification.ts similarity index 51% rename from src/notification.js rename to src/notification.ts index 5fe14541..1b26db23 100644 --- a/src/notification.js +++ b/src/notification.ts @@ -16,51 +16,40 @@ import { EventEmitter } from 'node:events' -import { DEFAULT_REGION, pipesetup, uriEscape } from './helpers.js' -import * as transformers from './transformers.js' +import jsonLineParser from 'stream-json/jsonl/Parser.js' -// Notification config - array of target configs. -// Target configs can be -// 1. Topic (simple notification service) -// 2. Queue (simple queue service) -// 3. 
CloudFront (lambda function) -export class NotificationConfig { - add(target) { - let instance = '' - if (target instanceof TopicConfig) { - instance = 'TopicConfiguration' - } - if (target instanceof QueueConfig) { - instance = 'QueueConfiguration' - } - if (target instanceof CloudFunctionConfig) { - instance = 'CloudFunctionConfiguration' - } - if (!this[instance]) { - this[instance] = [] - } - this[instance].push(target) - } -} +import type { Client } from './client.ts' +import { DEFAULT_REGION, pipesetup, uriEscape } from './helpers.ts' + +// TODO: type this + +type Event = unknown // Base class for three supported configs. -class TargetConfig { - setId(id) { +export class TargetConfig { + private Filter?: { S3Key: { FilterRule: { Name: string; Value: string }[] } } + private Event?: Event[] + private Id: any + + setId(id: any) { this.Id = id } - addEvent(newevent) { + + addEvent(newevent: Event) { if (!this.Event) { this.Event = [] } this.Event.push(newevent) } - addFilterSuffix(suffix) { + + addFilterSuffix(suffix: string) { if (!this.Filter) { this.Filter = { S3Key: { FilterRule: [] } } } this.Filter.S3Key.FilterRule.push({ Name: 'suffix', Value: suffix }) } - addFilterPrefix(prefix) { + + addFilterPrefix(prefix: string) { if (!this.Filter) { this.Filter = { S3Key: { FilterRule: [] } } } @@ -70,7 +59,9 @@ class TargetConfig { // 1. Topic (simple notification service) export class TopicConfig extends TargetConfig { - constructor(arn) { + private Topic: string + + constructor(arn: string) { super() this.Topic = arn } @@ -78,7 +69,9 @@ export class TopicConfig extends TargetConfig { // 2. Queue (simple queue service) export class QueueConfig extends TargetConfig { - constructor(arn) { + private Queue: string + + constructor(arn: string) { super() this.Queue = arn } @@ -86,16 +79,44 @@ export class QueueConfig extends TargetConfig { // 3. CloudFront (lambda function) export class CloudFunctionConfig extends TargetConfig { - constructor(arn) { + private CloudFunction: string + + constructor(arn: string) { super() this.CloudFunction = arn } } -export const buildARN = (partition, service, region, accountId, resource) => { - return 'arn:' + partition + ':' + service + ':' + region + ':' + accountId + ':' + resource +// Notification config - array of target configs. +// Target configs can be +// 1. Topic (simple notification service) +// 2. Queue (simple queue service) +// 3. 
CloudFront (lambda function) +export class NotificationConfig { + private TopicConfiguration?: TargetConfig[] + private CloudFunctionConfiguration?: TargetConfig[] + private QueueConfiguration?: TargetConfig[] + + add(target: TargetConfig) { + let instance: TargetConfig[] | undefined + if (target instanceof TopicConfig) { + instance = this.TopicConfiguration ??= [] + } + if (target instanceof QueueConfig) { + instance = this.QueueConfiguration ??= [] + } + if (target instanceof CloudFunctionConfig) { + instance = this.CloudFunctionConfiguration ??= [] + } + if (instance) { + instance.push(target) + } + } } +export const buildARN = (partition: string, service: string, region: string, accountId: string, resource: string) => { + return 'arn:' + partition + ':' + service + ':' + region + ':' + accountId + ':' + resource +} export const ObjectCreatedAll = 's3:ObjectCreated:*' export const ObjectCreatedPut = 's3:ObjectCreated:Put' export const ObjectCreatedPost = 's3:ObjectCreated:Post' @@ -105,12 +126,39 @@ export const ObjectRemovedAll = 's3:ObjectRemoved:*' export const ObjectRemovedDelete = 's3:ObjectRemoved:Delete' export const ObjectRemovedDeleteMarkerCreated = 's3:ObjectRemoved:DeleteMarkerCreated' export const ObjectReducedRedundancyLostObject = 's3:ReducedRedundancyLostObject' +export type NotificationEvent = + | 's3:ObjectCreated:*' + | 's3:ObjectCreated:Put' + | 's3:ObjectCreated:Post' + | 's3:ObjectCreated:Copy' + | 's3:ObjectCreated:CompleteMultipartUpload' + | 's3:ObjectRemoved:*' + | 's3:ObjectRemoved:Delete' + | 's3:ObjectRemoved:DeleteMarkerCreated' + | 's3:ReducedRedundancyLostObject' + | 's3:TestEvent' + | 's3:ObjectRestore:Post' + | 's3:ObjectRestore:Completed' + | 's3:Replication:OperationFailedReplication' + | 's3:Replication:OperationMissedThreshold' + | 's3:Replication:OperationReplicatedAfterThreshold' + | 's3:Replication:OperationNotTracked' + | string // put string at least so auto-complete could work +// TODO: type this +export type NotificationRecord = unknown // Poll for notifications, used in #listenBucketNotification. // Listening constitutes repeatedly requesting s3 whether or not any // changes have occurred. 
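Since the retyped notification classes keep the same runtime shape, here is a minimal sketch of how they compose (the ARN fields, bucket values, and rule id are illustrative; the event string is one of the `NotificationEvent` literals typed above):

```ts
// Sketch: wiring a queue target into a NotificationConfig (values illustrative)
import { buildARN, NotificationConfig, QueueConfig } from './notification.ts'

const arn = buildARN('aws', 'sqs', 'us-east-1', '123456789012', 'my-queue')
const queue = new QueueConfig(arn)
queue.setId('ObjectCreatedEvents')
queue.addEvent('s3:ObjectCreated:*') // one of the NotificationEvent literals above
queue.addFilterPrefix('photos/')
queue.addFilterSuffix('.jpg')

const config = new NotificationConfig()
config.add(queue) // lands in QueueConfiguration via the ??= branch above
```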
export class NotificationPoller extends EventEmitter { - constructor(client, bucketName, prefix, suffix, events) { + private client: Client + private bucketName: string + private prefix: string + private suffix: string + private events: NotificationEvent[] + private ending: boolean + + constructor(client: Client, bucketName: string, prefix: string, suffix: string, events: NotificationEvent[]) { super() this.client = client @@ -142,14 +190,14 @@ export class NotificationPoller extends EventEmitter { return } - let method = 'GET' - var queries = [] + const method = 'GET' + const queries = [] if (this.prefix) { - var prefix = uriEscape(this.prefix) + const prefix = uriEscape(this.prefix) queries.push(`prefix=${prefix}`) } if (this.suffix) { - var suffix = uriEscape(this.suffix) + const suffix = uriEscape(this.suffix) queries.push(`suffix=${suffix}`) } if (this.events) { @@ -157,44 +205,59 @@ export class NotificationPoller extends EventEmitter { } queries.sort() - var query = '' + let query = '' if (queries.length > 0) { query = `${queries.join('&')}` } const region = this.client.region || DEFAULT_REGION - this.client.makeRequest({ method, bucketName: this.bucketName, query }, '', [200], region, true, (e, response) => { - if (e) { - return this.emit('error', e) - } - - let transformer = transformers.getNotificationTransformer() - pipesetup(response, transformer) - .on('data', (result) => { - // Data is flushed periodically (every 5 seconds), so we should - // handle it after flushing from the JSON parser. - let records = result.Records - // If null (= no records), change to an empty array. - if (!records) { - records = [] - } - - // Iterate over the notifications and emit them individually. - records.forEach((record) => { - this.emit('notification', record) - }) - - // If we're done, stop. - if (this.ending) { - response.destroy() - } - }) - .on('error', (e) => this.emit('error', e)) - .on('end', () => { - // Do it again, if we haven't cancelled yet. - process.nextTick(() => { - this.checkForChanges() - }) - }) - }) + + this.client + .makeRequestAsync( + { + method, + bucketName: this.bucketName, + query, + }, + '', + [200], + region, + true, + ) + .then( + (response) => { + const asm = jsonLineParser.make() + + pipesetup(response, asm) + .on('data', (data) => { + // Data is flushed periodically (every 5 seconds), so we should + // handle it after flushing from the JSON parser. + let records = data.value.Records + // If null (= no records), change to an empty array. + if (!records) { + records = [] + } + + // Iterate over the notifications and emit them individually. + records.forEach((record: NotificationRecord) => { + this.emit('notification', record) + }) + + // If we're done, stop. + if (this.ending) { + response?.destroy() + } + }) + .on('error', (e) => this.emit('error', e)) + .on('end', () => { + // Do it again, if we haven't cancelled yet. + process.nextTick(() => { + this.checkForChanges() + }) + }) + }, + (e) => { + return this.emit('error', e) + }, + ) } } diff --git a/src/object-uploader.js b/src/object-uploader.js deleted file mode 100644 index 2fdf6606..00000000 --- a/src/object-uploader.js +++ /dev/null @@ -1,289 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2016 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import * as Crypto from 'node:crypto' -import { Transform } from 'node:stream' - -import * as querystring from 'query-string' - -import { getVersionId, sanitizeETag } from './helpers.js' - -// We extend Transform because Writable does not implement ._flush(). -export class ObjectUploader extends Transform { - constructor(client, bucketName, objectName, partSize, metaData, callback) { - super() - this.emptyStream = true - this.client = client - this.bucketName = bucketName - this.objectName = objectName - // The size of each multipart, chunked by BlockStream2. - this.partSize = partSize - // This is the metadata for the object. - this.metaData = metaData - - // Call like: callback(error, {etag, versionId}). - this.callback = callback - - // We need to keep track of what number chunk/part we're on. This increments - // each time _write() is called. Starts with 1, not 0. - this.partNumber = 1 - - // A list of the previously uploaded chunks, for resuming a file upload. This - // will be null if we aren't resuming an upload. - this.oldParts = null - - // Keep track of the etags for aggregating the chunks together later. Each - // etag represents a single chunk of the file. - this.etags = [] - - // This is for the multipart upload request — if null, we're either not initiated - // yet or we're flushing in one packet. - this.id = null - - // Handle errors. - this.on('error', (err) => { - callback(err) - }) - } - - _transform(chunk, encoding, callback) { - this.emptyStream = false - let method = 'PUT' - let headers = { 'Content-Length': chunk.length } - let md5digest = '' - - // Calculate and set Content-MD5 header if SHA256 is not set. - // This will happen only when there is a secure connection to the s3 server. - if (!this.client.enableSHA256) { - md5digest = Crypto.createHash('md5').update(chunk).digest() - headers['Content-MD5'] = md5digest.toString('base64') - } - // We can flush the object in one packet if it fits in one chunk. This is true - // if the chunk size is smaller than the part size, signifying the end of the - // stream. - if (this.partNumber == 1 && chunk.length < this.partSize) { - // PUT the chunk in a single request — use an empty query. - let options = { - method, - // Set user metadata as this is not a multipart upload - headers: Object.assign({}, this.metaData, headers), - query: '', - bucketName: this.bucketName, - objectName: this.objectName, - } - - this.client.makeRequest(options, chunk, [200], '', true, (err, response) => { - if (err) { - return callback(err) - } - let result = { - etag: sanitizeETag(response.headers.etag), - versionId: getVersionId(response.headers), - } - // Ignore the 'data' event so that the stream closes. (nodejs stream requirement) - response.on('data', () => {}) - - // Give the etag back, we're done! - - process.nextTick(() => { - this.callback(null, result) - }) - - // Because we're sure the stream has ended, allow it to flush and end. - callback() - }) - - return - } - - // If we aren't flushing in one packet, we need to initiate the multipart upload, - // if it hasn't already been done. 
The write will be buffered until the upload has been - // initiated. - if (this.id === null) { - this.once('ready', () => { - this._transform(chunk, encoding, callback) - }) - - // Check for an incomplete previous upload. - this.client.findUploadId(this.bucketName, this.objectName, (err, id) => { - if (err) { - return this.emit('error', err) - } - - // If no upload ID exists, initiate a new one. - if (!id) { - this.client.initiateNewMultipartUpload(this.bucketName, this.objectName, this.metaData, (err, id) => { - if (err) { - return callback(err) - } - - this.id = id - - // We are now ready to accept new chunks — this will flush the buffered chunk. - this.emit('ready') - }) - - return - } - - this.id = id - - // Retrieve the pre-uploaded parts, if we need to resume the upload. - this.client.listParts(this.bucketName, this.objectName, id, (err, etags) => { - if (err) { - return this.emit('error', err) - } - - // It is possible for no parts to be already uploaded. - if (!etags) { - etags = [] - } - - // oldParts will become an object, allowing oldParts[partNumber].etag - this.oldParts = etags.reduce(function (prev, item) { - if (!prev[item.part]) { - prev[item.part] = item - } - return prev - }, {}) - - this.emit('ready') - }) - }) - - return - } - - // Continue uploading various parts if we have initiated multipart upload. - let partNumber = this.partNumber++ - - // Check to see if we've already uploaded this chunk. If the hash sums match, - // we can skip to the next chunk. - if (this.oldParts) { - let oldPart = this.oldParts[partNumber] - - // Calulcate the md5 hash, if it has not already been calculated. - if (!md5digest) { - md5digest = Crypto.createHash('md5').update(chunk).digest() - } - - if (oldPart && md5digest.toString('hex') === oldPart.etag) { - // The md5 matches, the chunk has already been uploaded. - this.etags.push({ part: partNumber, etag: oldPart.etag }) - - callback() - return - } - } - - // Write the chunk with an uploader. - let query = querystring.stringify({ - partNumber: partNumber, - uploadId: this.id, - }) - - let options = { - method, - query, - headers, - bucketName: this.bucketName, - objectName: this.objectName, - } - - this.client.makeRequest(options, chunk, [200], '', true, (err, response) => { - if (err) { - return callback(err) - } - - // In order to aggregate the parts together, we need to collect the etags. - let etag = response.headers.etag - if (etag) { - etag = etag.replace(/^"/, '').replace(/"$/, '') - } - - this.etags.push({ part: partNumber, etag }) - - // Ignore the 'data' event so that the stream closes. (nodejs stream requirement) - response.on('data', () => {}) - - // We're ready for the next chunk. - callback() - }) - } - - _flush(callback) { - if (this.emptyStream) { - let method = 'PUT' - let headers = Object.assign({}, this.metaData, { 'Content-Length': 0 }) - let options = { - method, - headers, - query: '', - bucketName: this.bucketName, - objectName: this.objectName, - } - - this.client.makeRequest(options, '', [200], '', true, (err, response) => { - if (err) { - return callback(err) - } - - let result = { - etag: sanitizeETag(response.headers.etag), - versionId: getVersionId(response.headers), - } - - // Ignore the 'data' event so that the stream closes. (nodejs stream requirement) - response.on('data', () => {}) - - // Give the etag back, we're done! - process.nextTick(() => { - this.callback(null, result) - }) - - // Because we're sure the stream has ended, allow it to flush and end. 
- callback() - }) - - return - } - // If it has been uploaded in a single packet, we don't have to do anything. - if (this.id === null) { - return - } - - // This is called when all of the chunks uploaded successfully, thus - // completing the multipart upload. - this.client.completeMultipartUpload(this.bucketName, this.objectName, this.id, this.etags, (err, etag) => { - if (err) { - return callback(err) - } - - // Call our callback on the next tick to allow the streams infrastructure - // to finish what its doing before we continue. - process.nextTick(() => { - this.callback(null, etag) - }) - - callback() - }) - } -} - -// deprecated default export, please use named exports. -// keep for backward compatibility. -// eslint-disable-next-line import/no-default-export -export default ObjectUploader diff --git a/src/postPolicy.ts b/src/postPolicy.ts new file mode 100644 index 00000000..a1092e50 --- /dev/null +++ b/src/postPolicy.ts @@ -0,0 +1,104 @@ +// Build PostPolicy object that can be signed by presignedPostPolicy +import * as errors from './errors.ts' +import type { MetaData } from './helpers.ts' +import { isObject, isValidBucketName, isValidObjectName, isValidPrefix } from './helpers.ts' + +export class PostPolicy { + public policy: { conditions: (string | number)[][]; expiration?: string } + public formData: Record + + constructor() { + this.policy = { + conditions: [], + } + this.formData = {} + } + + // set expiration date + setExpires(date: Date) { + if (!date) { + throw new errors.InvalidDateError('Invalid date: cannot be null') + } + this.policy.expiration = date.toISOString() + } + + // set object name + setKey(objectName: string) { + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name : ${objectName}`) + } + this.policy.conditions.push(['eq', '$key', objectName]) + this.formData.key = objectName + } + + // set object name prefix, i.e policy allows any keys with this prefix + setKeyStartsWith(prefix: string) { + if (!isValidPrefix(prefix)) { + throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`) + } + this.policy.conditions.push(['starts-with', '$key', prefix]) + this.formData.key = prefix + } + + // set bucket name + setBucket(bucketName: string) { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name : ${bucketName}`) + } + this.policy.conditions.push(['eq', '$bucket', bucketName]) + this.formData.bucket = bucketName + } + + // set Content-Type + setContentType(type: string) { + if (!type) { + throw new Error('content-type cannot be null') + } + this.policy.conditions.push(['eq', '$Content-Type', type]) + this.formData['Content-Type'] = type + } + + // set Content-Type prefix, i.e image/ allows any image + setContentTypeStartsWith(prefix: string) { + if (!prefix) { + throw new Error('content-type cannot be null') + } + this.policy.conditions.push(['starts-with', '$Content-Type', prefix]) + this.formData['Content-Type'] = prefix + } + + // set Content-Disposition + setContentDisposition(value: string) { + if (!value) { + throw new Error('content-disposition cannot be null') + } + this.policy.conditions.push(['eq', '$Content-Disposition', value]) + this.formData['Content-Disposition'] = value + } + + // set minimum/maximum length of what Content-Length can be. 
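Taken together, a minimal sketch of assembling a policy with the setters in the new `postPolicy.ts` (bucket and object names are illustrative; `setContentLengthRange` is the method defined just below):

```ts
// Sketch: building a PostPolicy for presignedPostPolicy (values illustrative)
const policy = new PostPolicy()
policy.setBucket('my-bucket')
policy.setKey('uploads/photo.jpg')
policy.setContentType('image/jpeg')
policy.setContentDisposition('inline')
policy.setExpires(new Date(Date.now() + 24 * 60 * 60 * 1000)) // valid for 24 hours
policy.setContentLengthRange(1024, 10 * 1024 * 1024) // 1 KiB to 10 MiB, defined just below
```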
+  setContentLengthRange(min: number, max: number) {
+    if (min > max) {
+      throw new Error('min cannot be more than max')
+    }
+    if (min < 0) {
+      throw new Error('min should be >= 0')
+    }
+    if (max < 0) {
+      throw new Error('max should be >= 0')
+    }
+    this.policy.conditions.push(['content-length-range', min, max])
+  }
+
+  // set user defined metadata
+  setUserMetaData(metaData: MetaData) {
+    if (!isObject(metaData)) {
+      throw new TypeError('metadata should be of type "object"')
+    }
+    Object.entries(metaData).forEach(([key, value]) => {
+      const amzMetaDataKey = `x-amz-meta-${key}`
+      this.policy.conditions.push(['eq', `$${amzMetaDataKey}`, value])
+      this.formData[amzMetaDataKey] = value.toString()
+    })
+  }
+}
diff --git a/src/qs.ts b/src/qs.ts
new file mode 100644
index 00000000..56c17504
--- /dev/null
+++ b/src/qs.ts
@@ -0,0 +1,7 @@
+import queryString from 'query-string'
+
+// rfc 3986 encoding.
+// `URLSearchParams` and `node:querystring` won't work
+export function qs(q: Record<string, unknown>): string {
+  return queryString.stringify(q)
+}
diff --git a/src/request.ts b/src/request.ts
new file mode 100644
index 00000000..6846f6fc
--- /dev/null
+++ b/src/request.ts
@@ -0,0 +1,30 @@
+import * as http from 'node:http'
+import * as https from 'node:https'
+import type * as stream from 'node:stream'
+
+export async function request(
+  opt: https.RequestOptions,
+  isHttp: boolean,
+  body: Buffer | string | stream.Readable | undefined = undefined,
+): Promise<http.IncomingMessage> {
+  const transport = isHttp ? http : https
+
+  return new Promise<http.IncomingMessage>((resolve, reject) => {
+    const requestObj = transport.request(opt, (resp) => {
+      resolve(resp)
+    })
+
+    requestObj.on('error', (e: unknown) => {
+      reject(e)
+    })
+
+    if (body === undefined || Buffer.isBuffer(body) || typeof body === 'string') {
+      // a string/Buffer body (or no body at all) can be written and ended in one call
+      requestObj.end(body)
+    } else {
+      // a stream body: surface read errors, then pipe it in (piping ends the request)
+      body.on('error', reject)
+      body.pipe(requestObj)
+    }
+  })
+}
diff --git a/src/response.ts b/src/response.ts
new file mode 100644
index 00000000..bb3a0b15
--- /dev/null
+++ b/src/response.ts
@@ -0,0 +1,26 @@
+import type http from 'node:http'
+import type stream from 'node:stream'
+
+export async function readAsBuffer(res: stream.Readable): Promise<Buffer> {
+  return new Promise<Buffer>((resolve, reject) => {
+    const body: Buffer[] = []
+    res
+      .on('data', (chunk: Buffer) => body.push(chunk))
+      .on('error', (e) => reject(e))
+      .on('end', () => resolve(Buffer.concat(body)))
+  })
+}
+
+export async function readAsString(res: http.IncomingMessage): Promise<string> {
+  const body = await readAsBuffer(res)
+  return body.toString()
+}
+
+export async function drainResponse(res: stream.Readable): Promise<void> {
+  return new Promise((resolve, reject) => {
+    res
+      .on('data', () => {})
+      .on('error', (e) => reject(e))
+      .on('end', () => resolve())
+  })
+}
diff --git a/src/s3-endpoints.js b/src/s3-endpoints.ts
similarity index 87%
rename from src/s3-endpoints.js
rename to src/s3-endpoints.ts
index aa6a7921..a3f20e68 100644
--- a/src/s3-endpoints.js
+++ b/src/s3-endpoints.ts
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-import { isString } from './helpers.js'
+import { isString } from './helpers.ts'
 
 // List of currently supported endpoints.
 const awsS3Endpoint = {
@@ -35,16 +35,20 @@ const awsS3Endpoint = {
   'ap-east-1': 's3.ap-east-1.amazonaws.com',
   'eu-north-1': 's3.eu-north-1.amazonaws.com',
   // Add new endpoints here.
-}
+} as const
+
+export type Region = keyof typeof awsS3Endpoint | string
 
 // getS3Endpoint get relevant endpoint for the region.
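A sketch of how the two new single-purpose helpers chain together (the endpoint and payload are illustrative): `request` resolves with the raw `http.IncomingMessage`, and the `response.ts` readers buffer it.

```ts
// Sketch: POSTing a payload and buffering the reply (endpoint/payload illustrative)
import { request } from './request.ts'
import { readAsString } from './response.ts'

async function postAndRead(): Promise<string> {
  const payload = 'Action=AssumeRole&Version=2011-06-15'
  const res = await request(
    { hostname: 'sts.example.com', port: 443, path: '/', method: 'POST' },
    false, // not plain http, so the https transport is picked
    payload,
  )
  return readAsString(res) // concatenates the stream into a single string
}
```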
-export function getS3Endpoint(region) { +export function getS3Endpoint(region: string): string { if (!isString(region)) { throw new TypeError(`Invalid region: ${region}`) } - var endpoint = awsS3Endpoint[region] + + const endpoint = (awsS3Endpoint as Record)[region] if (endpoint) { return endpoint } + return 's3.amazonaws.com' } diff --git a/src/signing.js b/src/signing.ts similarity index 84% rename from src/signing.js rename to src/signing.ts index 247206f6..758cec10 100644 --- a/src/signing.js +++ b/src/signing.ts @@ -16,10 +16,9 @@ import * as Crypto from 'node:crypto' -import _ from 'lodash' - import * as errors from './errors.ts' -import { getScope, isArray, isNumber, isObject, isString, makeDateLong, makeDateShort, uriEscape } from './helpers.js' +import { getScope, isArray, isNumber, isObject, isString, makeDateLong, makeDateShort, uriEscape } from './helpers.ts' +import type { ICanonicalRequest, IRequest, RequestHeaders } from './type.ts' const signV4Algorithm = 'AWS4-HMAC-SHA256' @@ -33,7 +32,13 @@ const signV4Algorithm = 'AWS4-HMAC-SHA256' // \n // // -function getCanonicalRequest(method, path, headers, signedHeaders, hashedPayload) { +function getCanonicalRequest( + method: string, + path: string, + headers: RequestHeaders, + signedHeaders: string[], + hashedPayload: string, +): ICanonicalRequest { if (!isString(method)) { throw new TypeError('method should be of type "string"') } @@ -49,12 +54,13 @@ function getCanonicalRequest(method, path, headers, signedHeaders, hashedPayload if (!isString(hashedPayload)) { throw new TypeError('hashedPayload should be of type "string"') } + const headersArray = signedHeaders.reduce((acc, i) => { // Trim spaces from the value (required by V4 spec) const val = `${headers[i]}`.replace(/ +/g, ' ') acc.push(`${i.toLowerCase()}:${val}`) return acc - }, []) + }, [] as string[]) const requestResource = path.split('?')[0] let requestQuery = path.split('?')[1] @@ -66,7 +72,7 @@ function getCanonicalRequest(method, path, headers, signedHeaders, hashedPayload requestQuery = requestQuery .split('&') .sort() - .map((element) => (element.indexOf('=') === -1 ? element + '=' : element)) + .map((element) => (!element.includes('=') ? 
element + '=' : element)) .join('&') } @@ -81,7 +87,7 @@ function getCanonicalRequest(method, path, headers, signedHeaders, hashedPayload } // generate a credential string -function getCredential(accessKey, region, requestDate, serviceName = 's3') { +function getCredential(accessKey: string, region: string, requestDate?: Date, serviceName = 's3') { if (!isString(accessKey)) { throw new TypeError('accessKey should be of type "string"') } @@ -95,7 +101,7 @@ function getCredential(accessKey, region, requestDate, serviceName = 's3') { } // Returns signed headers array - alphabetically sorted -function getSignedHeaders(headers) { +function getSignedHeaders(headers: RequestHeaders): string[] { if (!isObject(headers)) { throw new TypeError('request should be of type "object"') } @@ -127,13 +133,13 @@ function getSignedHeaders(headers) { // Is skipped for obvious reasons const ignoredHeaders = ['authorization', 'content-length', 'content-type', 'user-agent'] - return _.map(headers, (v, header) => header) - .filter((header) => ignoredHeaders.indexOf(header) === -1) + return Object.keys(headers) + .filter((header) => !ignoredHeaders.includes(header)) .sort() } // returns the key used for calculating signature -function getSigningKey(date, region, secretKey, serviceName = 's3') { +function getSigningKey(date: Date, region: string, secretKey: string, serviceName = 's3') { if (!isObject(date)) { throw new TypeError('date should be of type "object"') } @@ -144,7 +150,7 @@ function getSigningKey(date, region, secretKey, serviceName = 's3') { throw new TypeError('secretKey should be of type "string"') } const dateLine = makeDateShort(date) - let hmac1 = Crypto.createHmac('sha256', 'AWS4' + secretKey) + const hmac1 = Crypto.createHmac('sha256', 'AWS4' + secretKey) .update(dateLine) .digest(), hmac2 = Crypto.createHmac('sha256', hmac1).update(region).digest(), @@ -153,7 +159,7 @@ function getSigningKey(date, region, secretKey, serviceName = 's3') { } // returns the string that needs to be signed -function getStringToSign(canonicalRequest, requestDate, region, serviceName = 's3') { +function getStringToSign(canonicalRequest: ICanonicalRequest, requestDate: Date, region: string, serviceName = 's3') { if (!isString(canonicalRequest)) { throw new TypeError('canonicalRequest should be of type "string"') } @@ -165,17 +171,13 @@ function getStringToSign(canonicalRequest, requestDate, region, serviceName = 's } const hash = Crypto.createHash('sha256').update(canonicalRequest).digest('hex') const scope = getScope(region, requestDate, serviceName) - const stringToSign = [] - stringToSign.push(signV4Algorithm) - stringToSign.push(makeDateLong(requestDate)) - stringToSign.push(scope) - stringToSign.push(hash) - const signString = stringToSign.join('\n') - return signString + const stringToSign = [signV4Algorithm, makeDateLong(requestDate), scope, hash] + + return stringToSign.join('\n') } // calculate the signature of the POST policy -export function postPresignSignatureV4(region, date, secretKey, policyBase64) { +export function postPresignSignatureV4(region: string, date: Date, secretKey: string, policyBase64: string): string { if (!isString(region)) { throw new TypeError('region should be of type "string"') } @@ -193,7 +195,14 @@ export function postPresignSignatureV4(region, date, secretKey, policyBase64) { } // Returns the authorization header -export function signV4(request, accessKey, secretKey, region, requestDate, serviceName = 's3') { +export function signV4( + request: IRequest, + accessKey: string, + 
secretKey: string, + region: string, + requestDate: Date, + serviceName = 's3', +) { if (!isObject(request)) { throw new TypeError('request should be of type "object"') } @@ -214,7 +223,7 @@ export function signV4(request, accessKey, secretKey, region, requestDate, servi throw new errors.SecretKeyRequiredError('secretKey is required for signing') } - const sha256sum = request.headers['x-amz-content-sha256'] + const sha256sum = request.headers['x-amz-content-sha256'] as string const signedHeaders = getSignedHeaders(request.headers) const canonicalRequest = getCanonicalRequest(request.method, request.path, request.headers, signedHeaders, sha256sum) @@ -229,11 +238,27 @@ export function signV4(request, accessKey, secretKey, region, requestDate, servi .toLowerCase()}, Signature=${signature}` } -export function signV4ByServiceName(request, accessKey, secretKey, region, requestDate, serviceName = 's3') { +export function signV4ByServiceName( + request: IRequest, + accessKey: string, + secretKey: string, + region: string, + requestDate: Date, + serviceName = 's3', +): string { return signV4(request, accessKey, secretKey, region, requestDate, serviceName) } + // returns a presigned URL string -export function presignSignatureV4(request, accessKey, secretKey, sessionToken, region, requestDate, expires) { +export function presignSignatureV4( + request: IRequest, + accessKey: string, + secretKey: string, + sessionToken: string, + region: string, + requestDate: Date, + expires: unknown, +) { if (!isObject(request)) { throw new TypeError('request should be of type "object"') } @@ -294,6 +319,5 @@ export function presignSignatureV4(request, accessKey, secretKey, sessionToken, const stringToSign = getStringToSign(canonicalRequest, requestDate, region) const signingKey = getSigningKey(requestDate, region, secretKey) const signature = Crypto.createHmac('sha256', signingKey).update(stringToSign).digest('hex').toLowerCase() - const presignedUrl = request.protocol + '//' + request.headers.host + path + `&X-Amz-Signature=${signature}` - return presignedUrl + return request.protocol + '//' + request.headers.host + path + `&X-Amz-Signature=${signature}` } diff --git a/src/streamify.ts b/src/streamify.ts new file mode 100644 index 00000000..02ab2d65 --- /dev/null +++ b/src/streamify.ts @@ -0,0 +1,30 @@ +import * as stream from 'node:stream' + +const Generator = async function* () {}.constructor + +export class StreamGenerators extends stream.Readable { + private _g: AsyncGenerator + + constructor(g: AsyncGeneratorFunction) { + if (!(g instanceof Generator)) { + throw new TypeError('First argument must be a ES6 Generator') + } + + super({ objectMode: true }) + this._g = g() + } + + async _read() { + try { + const { done, value } = await this._g.next() + + if (done) { + this.push(null) + } else { + this.push(value) + } + } catch (e) { + this.emit('error', e) + } + } +} diff --git a/src/transformers.js b/src/transformers.js deleted file mode 100644 index 4cde9a2a..00000000 --- a/src/transformers.js +++ /dev/null @@ -1,263 +0,0 @@ -/* - * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015, 2016 MinIO, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import * as Crypto from 'node:crypto' - -import JSONParser from 'json-stream' -import _ from 'lodash' -import Through2 from 'through2' - -import * as errors from './errors.ts' -import { isFunction } from './helpers.js' -import * as xmlParsers from './xml-parsers.js' - -// getConcater returns a stream that concatenates the input and emits -// the concatenated output when 'end' has reached. If an optional -// parser function is passed upon reaching the 'end' of the stream, -// `parser(concatenated_data)` will be emitted. -export function getConcater(parser, emitError) { - var objectMode = false - var bufs = [] - - if (parser && !isFunction(parser)) { - throw new TypeError('parser should be of type "function"') - } - - if (parser) { - objectMode = true - } - - return Through2( - { objectMode }, - function (chunk, enc, cb) { - bufs.push(chunk) - cb() - }, - function (cb) { - if (emitError) { - cb(parser(Buffer.concat(bufs).toString())) - // cb(e) would mean we have to emit 'end' by explicitly calling this.push(null) - this.push(null) - return - } - if (bufs.length) { - if (parser) { - this.push(parser(Buffer.concat(bufs).toString())) - } else { - this.push(Buffer.concat(bufs)) - } - } - cb() - }, - ) -} - -// Generates an Error object depending on http statusCode and XML body -export function getErrorTransformer(response) { - var statusCode = response.statusCode - var code, message - if (statusCode === 301) { - code = 'MovedPermanently' - message = 'Moved Permanently' - } else if (statusCode === 307) { - code = 'TemporaryRedirect' - message = 'Are you using the correct endpoint URL?' - } else if (statusCode === 403) { - code = 'AccessDenied' - message = 'Valid and authorized credentials required' - } else if (statusCode === 404) { - code = 'NotFound' - message = 'Not Found' - } else if (statusCode === 405) { - code = 'MethodNotAllowed' - message = 'Method Not Allowed' - } else if (statusCode === 501) { - code = 'MethodNotAllowed' - message = 'Method Not Allowed' - } else { - code = 'UnknownError' - message = `${statusCode}` - } - - var headerInfo = {} - // A value created by S3 compatible server that uniquely identifies - // the request. - headerInfo.amzRequestid = response.headersSent ? response.getHeader('x-amz-request-id') : null - // A special token that helps troubleshoot API replies and issues. - headerInfo.amzId2 = response.headersSent ? response.getHeader('x-amz-id-2') : null - // Region where the bucket is located. This header is returned only - // in HEAD bucket and ListObjects response. - headerInfo.amzBucketRegion = response.headersSent ? response.getHeader('x-amz-bucket-region') : null - - return getConcater((xmlString) => { - let getError = () => { - // Message should be instantiated for each S3Errors. - var e = new errors.S3Error(message) - // S3 Error code. 
- e.code = code - _.each(headerInfo, (value, key) => { - e[key] = value - }) - return e - } - if (!xmlString) { - return getError() - } - let e - try { - e = xmlParsers.parseError(xmlString, headerInfo) - } catch (ex) { - return getError() - } - return e - }, true) -} - -// A through stream that calculates md5sum and sha256sum -export function getHashSummer(enableSHA256) { - var md5 = Crypto.createHash('md5') - var sha256 = Crypto.createHash('sha256') - - return Through2.obj( - function (chunk, enc, cb) { - if (enableSHA256) { - sha256.update(chunk) - } else { - md5.update(chunk) - } - cb() - }, - function (cb) { - var md5sum = '' - var sha256sum = '' - if (enableSHA256) { - sha256sum = sha256.digest('hex') - } else { - md5sum = md5.digest('base64') - } - var hashData = { md5sum, sha256sum } - this.push(hashData) - this.push(null) - cb() - }, - ) -} - -// Following functions return a stream object that parses XML -// and emits suitable Javascript objects. - -// Parses CopyObject response. -export function getCopyObjectTransformer() { - return getConcater(xmlParsers.parseCopyObject) -} - -// Parses listBuckets response. -export function getListBucketTransformer() { - return getConcater(xmlParsers.parseListBucket) -} - -// Parses listMultipartUploads response. -export function getListMultipartTransformer() { - return getConcater(xmlParsers.parseListMultipart) -} - -// Parses listParts response. -export function getListPartsTransformer() { - return getConcater(xmlParsers.parseListParts) -} - -// Parses initMultipartUpload response. -export function getInitiateMultipartTransformer() { - return getConcater(xmlParsers.parseInitiateMultipart) -} - -// Parses listObjects response. -export function getListObjectsTransformer() { - return getConcater(xmlParsers.parseListObjects) -} - -// Parses listObjects response. -export function getListObjectsV2Transformer() { - return getConcater(xmlParsers.parseListObjectsV2) -} - -// Parses listObjects with metadata response. -export function getListObjectsV2WithMetadataTransformer() { - return getConcater(xmlParsers.parseListObjectsV2WithMetadata) -} - -// Parses completeMultipartUpload response. -export function getCompleteMultipartTransformer() { - return getConcater(xmlParsers.parseCompleteMultipart) -} - -// Parses getBucketLocation response. -export function getBucketRegionTransformer() { - return getConcater(xmlParsers.parseBucketRegion) -} - -// Parses GET/SET BucketNotification response -export function getBucketNotificationTransformer() { - return getConcater(xmlParsers.parseBucketNotification) -} - -// Parses a notification. -export function getNotificationTransformer() { - // This will parse and return each object. 
- return new JSONParser() -} - -export function bucketVersioningTransformer() { - return getConcater(xmlParsers.parseBucketVersioningConfig) -} - -export function getTagsTransformer() { - return getConcater(xmlParsers.parseTagging) -} - -export function lifecycleTransformer() { - return getConcater(xmlParsers.parseLifecycleConfig) -} - -export function objectLockTransformer() { - return getConcater(xmlParsers.parseObjectLockConfig) -} - -export function objectRetentionTransformer() { - return getConcater(xmlParsers.parseObjectRetentionConfig) -} -export function bucketEncryptionTransformer() { - return getConcater(xmlParsers.parseBucketEncryptionConfig) -} - -export function replicationConfigTransformer() { - return getConcater(xmlParsers.parseReplicationConfig) -} - -export function objectLegalHoldTransformer() { - return getConcater(xmlParsers.parseObjectLegalHoldConfig) -} - -export function uploadPartTransformer() { - return getConcater(xmlParsers.uploadPartParser) -} -export function selectObjectContentTransformer() { - return getConcater() -} - -export function removeObjectsTransformer() { - return getConcater(xmlParsers.removeObjectsParser) -} diff --git a/src/transformers.ts b/src/transformers.ts new file mode 100644 index 00000000..a387b9f8 --- /dev/null +++ b/src/transformers.ts @@ -0,0 +1,161 @@ +/* + * MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015, 2016 MinIO, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import * as crypto from 'node:crypto' +import type { ServerResponse } from 'node:http' +import type * as stream from 'node:stream' + +import Through2 from 'through2' + +import * as errors from './errors.ts' +import { isFunction } from './helpers.ts' +import * as xmlParsers from './xml-parsers.ts' + +// getConcater returns a stream that concatenates the input and emits +// the concatenated output when 'end' has reached. If an optional +// parser function is passed upon reaching the 'end' of the stream, +// `parser(concatenated_data)` will be emitted. 
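Before the implementation below, a usage sketch: the concater is the glue between a response stream and the XML parsers (the choice of `parseTagging` here is illustrative, any `(xml: string) => T` parser works):

```ts
// Sketch: concatenating a response and parsing it once 'end' fires (parser choice illustrative)
import type stream from 'node:stream'

import { pipesetup } from './helpers.ts'
import { getConcater } from './transformers.ts'
import * as xmlParsers from './xml-parsers.ts'

function parseTagsFrom(response: stream.Readable) {
  pipesetup(response, getConcater(xmlParsers.parseTagging))
    .on('data', (tags) => console.log(tags)) // the single parsed result
    .on('error', (e) => console.error(e))
}
```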
+export function getConcater(parser?: undefined | ((xml: string) => any), emitError?: boolean): stream.Transform { + let objectMode = false + const bufs: Buffer[] = [] + + if (parser && !isFunction(parser)) { + throw new TypeError('parser should be of type "function"') + } + + if (parser) { + objectMode = true + } + + return Through2( + { objectMode }, + function (chunk, enc, cb) { + bufs.push(chunk) + cb() + }, + function (cb) { + if (emitError) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + cb(parser(Buffer.concat(bufs).toString())) + // cb(e) would mean we have to emit 'end' by explicitly calling this.push(null) + this.push(null) + return + } + if (bufs.length) { + if (parser) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + this.push(parser(Buffer.concat(bufs).toString())) + } else { + this.push(Buffer.concat(bufs)) + } + } + cb() + }, + ) +} + +// Generates an Error object depending on http statusCode and XML body +export function getErrorTransformer(response: ServerResponse) { + const statusCode = response.statusCode + let code: string, message: string + if (statusCode === 301) { + code = 'MovedPermanently' + message = 'Moved Permanently' + } else if (statusCode === 307) { + code = 'TemporaryRedirect' + message = 'Are you using the correct endpoint URL?' + } else if (statusCode === 403) { + code = 'AccessDenied' + message = 'Valid and authorized credentials required' + } else if (statusCode === 404) { + code = 'NotFound' + message = 'Not Found' + } else if (statusCode === 405) { + code = 'MethodNotAllowed' + message = 'Method Not Allowed' + } else if (statusCode === 501) { + code = 'MethodNotAllowed' + message = 'Method Not Allowed' + } else { + code = 'UnknownError' + message = `${statusCode}` + } + + const headerInfo: Record = {} + // A value created by S3 compatible server that uniquely identifies the request. + headerInfo.amzRequestid = response.headersSent ? (response.getHeader('x-amz-request-id') as string | undefined) : null + // A special token that helps troubleshoot API replies and issues. + headerInfo.amzId2 = response.headersSent ? (response.getHeader('x-amz-id-2') as string | undefined) : null + // Region where the bucket is located. This header is returned only + // in HEAD bucket and ListObjects response. + headerInfo.amzBucketRegion = response.headersSent + ? (response.getHeader('x-amz-bucket-region') as string | undefined) + : null + + return getConcater((xmlString) => { + const getError = () => { + // Message should be instantiated for each S3Errors. + const e = new errors.S3Error(message, { cause: headerInfo }) + // S3 Error code. + e.code = code + Object.entries(headerInfo).forEach(([key, value]) => { + // @ts-expect-error force set error properties + e[key] = value + }) + return e + } + if (!xmlString) { + return getError() + } + let e + try { + e = xmlParsers.parseError(xmlString, headerInfo) + } catch (ex) { + return getError() + } + return e + }, true) +} + +export function hashBinary(buf: Buffer, enableSHA256: boolean) { + let sha256sum = '' + if (enableSHA256) { + sha256sum = crypto.createHash('sha256').update(buf).digest('hex') + } + const md5sum = crypto.createHash('md5').update(buf).digest('base64') + + return { md5sum, sha256sum } +} + +// Following functions return a stream object that parses XML +// and emits suitable Javascript objects. + +// Parses listMultipartUploads response. 
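The streaming hash summer from the deleted file is replaced by `hashBinary` above, which computes both digests from a whole buffer in one shot. A quick sketch of its contract (input buffer illustrative), ahead of the remaining transformer helpers below:

```ts
// Sketch: hashBinary returns the two checksum encodings the request headers expect
const { md5sum, sha256sum } = hashBinary(Buffer.from('hello world'), true)
// md5sum    -> base64, as used for 'Content-MD5'
// sha256sum -> hex, as used for 'x-amz-content-sha256' (empty string when disabled)
```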
+export function getListMultipartTransformer() { + return getConcater(xmlParsers.parseListMultipart) +} + +// Parses listObjects response. +export function getListObjectsV2Transformer() { + return getConcater(xmlParsers.parseListObjectsV2) +} + +// Parses listObjects with metadata response. +export function getListObjectsV2WithMetadataTransformer() { + return getConcater(xmlParsers.parseListObjectsV2WithMetadata) +} diff --git a/src/type.ts b/src/type.ts new file mode 100644 index 00000000..d0d9e320 --- /dev/null +++ b/src/type.ts @@ -0,0 +1,239 @@ +import type { Readable as ReadableStream } from 'node:stream' + +export type Binary = string | Buffer +export type RequestHeaders = Record + +export interface IRequest { + protocol: string + port?: number | string + method: string + path: string + headers: RequestHeaders +} + +export type ICanonicalRequest = string + +export interface ICredentials { + accessKey: string + secretKey: string + sessionToken?: string +} + +export type UploadID = string + +export type LockUnit = 'Days' | 'Years' +export type LegalHoldStatus = 'ON' | 'OFF' +export type NoResultCallback = (error: unknown | null) => void +export type ResultCallback = (error: unknown | null, result: T) => void +export type TagList = Record +export type EmptyObject = Record +export type VersionIdentification = { versionId?: string } +export type Lifecycle = LifecycleConfig | null | '' +export type Lock = LockConfig | EmptyObject +export type Encryption = EncryptionConfig | EmptyObject +export type Retention = RetentionOptions | EmptyObject +export type IsoDate = string + +export type GetObjectOpt = { + versionId?: string +} + +export interface BucketItemCopy { + etag: string + lastModified?: Date +} + +export interface BucketItem { + name: string + prefix: string + size: number + etag: string + lastModified: Date +} + +export interface BucketItemWithMetadata extends BucketItem { + metadata: ItemBucketMetadata | ItemBucketMetadataList +} + +export type StatObjectOpts = { + versionId?: string +} + +export interface BucketItemStat { + size: number + etag: string + lastModified: Date + metaData: ItemBucketMetadata + // version id of the object if available + versionId: string | null +} + +export interface IncompleteUploadedBucketItem { + key: string + uploadId: string + size: number +} + +export interface BucketStream extends ReadableStream { + on(event: 'data', listener: (item: T) => void): this + + on(event: 'end' | 'pause' | 'readable' | 'resume' | 'close', listener: () => void): this + + on(event: 'error', listener: (err: Error) => void): this + + on(event: string | symbol, listener: (...args: any[]) => void): this +} + +export interface PostPolicyResult { + postURL: string + formData: { + [key: string]: any + } +} + +export interface MetadataItem { + Key: string + Value: string +} + +export interface ItemBucketMetadataList { + Items: MetadataItem[] +} + +export interface ItemBucketMetadata { + [key: string]: any +} + +export interface UploadedObjectInfo { + etag: string + versionId: string | null +} + +export interface Tag { + Key: string + Value: string +} + +export interface LifecycleConfig { + Rule: LifecycleRule[] +} + +export interface LifecycleRule { + [key: string]: any +} + +export interface LockConfig { + objectLockEnabled?: 'Enabled' + mode: Mode + unit: LockUnit + validity: number +} + +export interface EncryptionConfig { + Rule: EncryptionRule[] +} + +export interface EncryptionRule { + [key: string]: any +} + +export interface ReplicationConfig { + role: string + rules: [] 
+} + +export interface ReplicationConfig { + [key: string]: any +} + +export interface RetentionOptions { + versionId: string + mode?: Mode + retainUntilDate?: IsoDate + governanceBypass?: boolean +} + +export interface LegalHoldOptions { + versionId?: string + status: LegalHoldStatus +} + +export interface InputSerialization { + CompressionType?: 'NONE' | 'GZIP' | 'BZIP2' + CSV?: { + AllowQuotedRecordDelimiter?: boolean + Comments?: string + FieldDelimiter?: string + FileHeaderInfo?: 'NONE' | 'IGNORE' | 'USE' + QuoteCharacter?: string + QuoteEscapeCharacter?: string + RecordDelimiter?: string + } + JSON?: { + Type: 'DOCUMENT' | 'LINES' + } + Parquet?: EmptyObject +} + +export interface OutputSerialization { + CSV?: { + FieldDelimiter?: string + QuoteCharacter?: string + QuoteEscapeCharacter?: string + QuoteFields?: string + RecordDelimiter?: string + } + JSON?: { + RecordDelimiter?: string + } +} + +export interface SelectOptions { + expression: string + expressionType?: string + inputSerialization: InputSerialization + outputSerialization: OutputSerialization + requestProgress?: { Enabled: boolean } + scanRange?: { Start: number; End: number } +} + +export interface SourceObjectStats { + size: number + metaData: string + lastModicied: Date + versionId: string + etag: string +} + +export interface MakeBucketOpt { + ObjectLocking?: boolean +} + +export interface RemoveOptions { + versionId?: string + forceDelete?: boolean + governanceBypass?: boolean +} + +export interface BucketItemFromList { + name: string + // date when bucket was created + creationDate: Date +} + +export type VersioningConfig = Record + +export interface VersionConfigInput { + Status?: string + MfaDelete?: string + + [key: string]: any +} + +export type Mode = 'COMPLIANCE' | 'GOVERNANCE' + +export type ListObjectV1Opt = { + Delimiter?: string + MaxKeys?: number + IncludeVersion?: boolean +} diff --git a/src/typed-client.ts b/src/typed-client.ts new file mode 100644 index 00000000..e65c6096 --- /dev/null +++ b/src/typed-client.ts @@ -0,0 +1,1708 @@ +import * as stream from 'node:stream' + +import { TextEncoder } from 'web-encoding' +import xml2js from 'xml2js' + +import { asCallback, asCallbackFn } from './as-callback.ts' +import { fsp } from './async.ts' +import type { RequestMethod, RequestOption } from './client.ts' +import { Client, findCallback } from './client.ts' +import * as errors from './errors.ts' +import type { MetaData, SelectResults } from './helpers.ts' +import { + getScope, + insertContentType, + isArray, + isBoolean, + isEmpty, + isFunction, + isNumber, + isObject, + isOptionalFunction, + isString, + isValidBucketName, + isValidDate, + isValidObjectName, + isValidPrefix, + LEGAL_HOLD_STATUS, + makeDateLong, + prependXAMZMeta, + RETENTION_MODES, + toMd5, + uriEscape, +} from './helpers.ts' +import { PostPolicy } from './postPolicy.ts' +import { qs } from './qs.ts' +import { readAsBuffer } from './response.ts' +import { postPresignSignatureV4, presignSignatureV4 } from './signing.ts' +import * as transformers from './transformers.ts' +import type { + BucketStream, + Encryption, + LegalHoldOptions, + Lifecycle, + ListObjectV1Opt, + NoResultCallback, + PostPolicyResult, + RemoveOptions, + RequestHeaders, + ResultCallback, + Retention, + SelectOptions, + Tag, + TagList, + UploadedObjectInfo, + VersionConfigInput, + VersionIdentification, + VersioningConfig, +} from './type.ts' +import type { S3ListObject } from './xml-parsers.ts' +import * as xmlParsers from './xml-parsers.ts' +import { 
parseSelectObjectContentResponse } from './xml-parsers.ts'
+
+export class TypedClient extends Client {
+  getBucketVersioning(bucketName: string, callback: ResultCallback<VersioningConfig>): void
+  getBucketVersioning(bucketName: string): Promise<VersioningConfig>
+
+  getBucketVersioning(bucketName: string, cb?: ResultCallback<VersioningConfig>): void | Promise<VersioningConfig> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+
+    const method = 'GET'
+    const query = 'versioning'
+    return asCallbackFn(cb, async () => {
+      const res = await this.makeRequestAsync({ method, bucketName, query })
+      const body = await readAsBuffer(res)
+      return xmlParsers.parseBucketVersioningConfig(body.toString())
+    })
+  }
+
+  setBucketVersioning(bucketName: string, versioningConfig: VersionConfigInput, callback: NoResultCallback): void
+  setBucketVersioning(bucketName: string, versioningConfig: VersionConfigInput): Promise<void>
+  setBucketVersioning(
+    bucketName: string,
+    versionConfig: VersionConfigInput,
+    cb?: NoResultCallback,
+  ): void | Promise<void> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!Object.keys(versionConfig).length) {
+      throw new errors.InvalidArgumentError('versionConfig should be of type "object"')
+    }
+
+    const method = 'PUT'
+    const query = 'versioning'
+    const builder = new xml2js.Builder({
+      rootName: 'VersioningConfiguration',
+      renderOpts: { pretty: false },
+      headless: true,
+    })
+    const payload = builder.buildObject(versionConfig)
+
+    return asCallbackFn(cb, async () => {
+      await this.makeRequestAsyncOmit({ method, bucketName, query }, payload)
+    })
+  }
+
+  /**
+   * Set the policy on a bucket or an object prefix.
+   *
+   * @param bucketName - name of the bucket
+   * @param bucketPolicy - bucket policy (JSON stringify'ed)
+   */
+  setBucketPolicy(bucketName: string, bucketPolicy: string): Promise<void>
+  setBucketPolicy(bucketName: string, bucketPolicy: string, callback: NoResultCallback): void
+
+  setBucketPolicy(bucketName: string, policy: string, cb?: NoResultCallback): void | Promise<void> {
+    // Validate arguments.
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
+    }
+    if (!isString(policy)) {
+      throw new errors.InvalidBucketPolicyError(`Invalid bucket policy: ${policy} - must be "string"`)
+    }
+
+    let method: RequestMethod = 'DELETE'
+    const query = 'policy'
+
+    if (policy) {
+      method = 'PUT'
+    }
+
+    return asCallbackFn(cb, async () => {
+      await this.makeRequestAsyncOmit(
+        {
+          method,
+          bucketName,
+          query,
+        },
+        policy,
+        [204],
+        '',
+      )
+    })
+  }
+
+  /**
+   * Get the policy on a bucket or an object prefix.
+   */
+  getBucketPolicy(bucketName: string, callback: ResultCallback<string>): void
+  getBucketPolicy(bucketName: string): Promise<string>
+
+  getBucketPolicy(bucketName: string, cb?: ResultCallback<string>): void | Promise<string> {
+    // Validate arguments.
+  /**
+   * Get the policy on a bucket or an object prefix.
+   */
+  getBucketPolicy(bucketName: string, callback: ResultCallback<string>): void
+  getBucketPolicy(bucketName: string): Promise<string>
+
+  getBucketPolicy(bucketName: string, cb?: ResultCallback<string>): void | Promise<string> {
+    // Validate arguments.
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
+    }
+
+    const method = 'GET'
+    const query = 'policy'
+    return asCallbackFn(cb, async () => {
+      const res = await this.makeRequestAsync({ method, bucketName, query }, '', [200], '')
+      const body = await readAsBuffer(res)
+      return body.toString()
+    })
+  }
+
+  /**
+   * Get tags associated with a bucket
+   */
+  getBucketTagging(bucketName: string, callback: ResultCallback<Tag[]>): void
+  getBucketTagging(bucketName: string): Promise<Tag[]>
+
+  getBucketTagging(bucketName: string, cb?: ResultCallback<Tag[]>): void | Promise<Tag[]> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
+    }
+
+    const method = 'GET'
+    const query = 'tagging'
+    const requestOptions: RequestOption = { method, bucketName, query }
+
+    return asCallbackFn(cb, async () => {
+      const res = await this.makeRequestAsync(requestOptions)
+      const body = await readAsBuffer(res)
+      return xmlParsers.parseTagging(body.toString())
+    })
+  }
+
+  /** Remove tags on a bucket or an object, based on params
+   * __Arguments__
+   * bucketName _string_
+   * objectName _string_ (optional)
+   * removeOpts _object_ (Optional) e.g {versionId:"my-object-version-id"}
+   */
+  protected async removeTagging({
+    bucketName,
+    objectName,
+    removeOpts,
+  }: {
+    removeOpts?: { versionId?: string }
+    bucketName: string
+    objectName?: string
+  }) {
+    const method = 'DELETE'
+    let query = 'tagging'
+
+    if (removeOpts && removeOpts.versionId) {
+      query = `${query}&versionId=${removeOpts.versionId}`
+    }
+    const requestOptions: RequestOption = { method, bucketName, objectName, query }
+
+    await this.makeRequestAsync(requestOptions, '', [200, 204], '')
+  }
+
+  /** Remove tags associated with a bucket
+   * __Arguments__
+   * bucketName _string_
+   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
+   */
+  removeBucketTagging(bucketName: string, callback: NoResultCallback): void
+  removeBucketTagging(bucketName: string): Promise<void>
+
+  removeBucketTagging(bucketName: string, cb?: NoResultCallback): void | Promise<void> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+
+    return asCallback(cb, this.removeTagging({ bucketName }))
+  }
+
+  /**
+   * Set tags on a bucket (at most 10 tags are allowed)
+   */
+  setBucketTagging(bucketName: string, tags: TagList, callback: NoResultCallback): void
+  setBucketTagging(bucketName: string, tags: TagList): Promise<void>
+
+  setBucketTagging(bucketName: string, tags: TagList, cb?: NoResultCallback): void | Promise<void> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isObject(tags)) {
+      throw new errors.InvalidArgumentError('tags should be of type "object"')
+    }
+    if (Object.keys(tags).length > 10) {
+      throw new errors.InvalidArgumentError('maximum tags allowed is 10')
+    }
+
+    return asCallback(cb, this.setTagging({ bucketName, tags }))
+  }
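
A usage sketch for the bucket-tagging helpers above; bucket name and tags are placeholders, and the 10-tag limit is the one enforced by `setBucketTagging`:

```ts
import { TypedClient } from './typed-client.ts'

declare const client: TypedClient // placeholder instance

async function taggingDemo() {
  await client.setBucketTagging('my-bucket', { team: 'storage', env: 'staging' })
  const tags = await client.getBucketTagging('my-bucket')
  console.log(tags)
  // Remove all bucket tags again (delegates to the protected removeTagging helper).
  await client.removeBucketTagging('my-bucket')
}
```
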
+  getBucketLifecycle(bucketName: string, callback: ResultCallback<Lifecycle>): void
+  getBucketLifecycle(bucketName: string): Promise<Lifecycle>
+
+  /**
+   * Get lifecycle configuration on a bucket.
+   */
+  getBucketLifecycle(bucketName: string, cb?: ResultCallback<Lifecycle>) {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    const method = 'GET'
+    const query = 'lifecycle'
+    const requestOptions: RequestOption = { method, bucketName, query }
+
+    return asCallbackFn(cb, async () => {
+      const res = await this.makeRequestAsync(requestOptions)
+      const body = await readAsBuffer(res)
+      return xmlParsers.parseLifecycleConfig(body.toString())
+    })
+  }
+
+  removeBucketLifecycle(bucketName: string, callback: NoResultCallback): void
+  removeBucketLifecycle(bucketName: string): Promise<void>
+
+  /**
+   * Remove lifecycle configuration of a bucket.
+   */
+  removeBucketLifecycle(bucketName: string, cb?: NoResultCallback) {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    const method = 'DELETE'
+    const query = 'lifecycle'
+    return asCallbackFn(cb, async () => {
+      await this.makeRequestAsyncOmit({ method, bucketName, query }, '', [204])
+    })
+  }
+
+  // presignedPostPolicy can be used in situations where we want more control on the upload than what
+  // presignedPutObject() provides, i.e. with presignedPostPolicy we can impose policy restrictions on the upload.
+
+  // returns a PostPolicy object
+  newPostPolicy() {
+    return new PostPolicy()
+  }
+
+  /**
+   * Apply lifecycle configuration on a bucket.
+   *
+   * This method is not documented yet, so it is marked `protected`; TS will not emit it in the type definition.
+   *
+   * @param bucketName
+   * @param policyConfig - a valid policy configuration object.
+   */
+  protected async applyBucketLifecycle(bucketName: string, policyConfig: Lifecycle): Promise<void> {
+    const method = 'PUT'
+    const query = 'lifecycle'
+
+    const encoder = new TextEncoder()
+    const builder = new xml2js.Builder({
+      rootName: 'LifecycleConfiguration',
+      headless: true,
+      renderOpts: { pretty: false },
+    })
+
+    const payload = encoder.encode(builder.buildObject(policyConfig))
+    const headers: RequestHeaders = { 'Content-MD5': toMd5(payload) }
+    await this.makeRequestAsyncOmit({ method, bucketName, query, headers }, payload)
+  }
+
+  /** Set/override lifecycle configuration on a bucket. If the configuration is empty, it removes the configuration.
+   *
+   * @param bucketName
+   * @param lifecycleConfig - null or an empty object will remove the bucket lifecycle
+   * @param callback - if no callback is passed, a promise is returned
+   */
+  setBucketLifecycle(bucketName: string, lifecycleConfig: Lifecycle | null, callback: NoResultCallback): void
+  setBucketLifecycle(bucketName: string, lifecycleConfig: Lifecycle | null): Promise<void>
+
+  setBucketLifecycle(bucketName: string, lifeCycleConfig: Lifecycle | null = null, cb?: NoResultCallback) {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    return asCallbackFn(cb, async () => {
+      if (isEmpty(lifeCycleConfig)) {
+        await this.removeBucketLifecycle(bucketName)
+      } else {
+        await this.applyBucketLifecycle(bucketName, lifeCycleConfig)
+      }
+    })
+  }
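
For illustration, a lifecycle round trip under the usual assumptions: the rule object below mirrors the XML that `applyBucketLifecycle` serializes, and its field names follow the S3 LifecycleConfiguration schema rather than anything defined in this diff, so treat the shape as a sketch:

```ts
import { TypedClient } from './typed-client.ts'

declare const client: TypedClient // placeholder instance

async function lifecycleDemo() {
  // Hypothetical rule: expire objects under "logs/" after 30 days.
  await client.setBucketLifecycle('my-bucket', {
    Rule: [{ ID: 'expire-logs', Status: 'Enabled', Filter: { Prefix: 'logs/' }, Expiration: { Days: 30 } }],
  })

  // Passing null (or an empty config) removes the configuration instead of applying one.
  await client.setBucketLifecycle('my-bucket', null)
}
```
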
+  // List the objects in the bucket.
+  //
+  // __Arguments__
+  // * `bucketName` _string_: name of the bucket
+  // * `prefix` _string_: the prefix of the objects that should be listed (optional, default `''`)
+  // * `recursive` _bool_: `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. (optional, default `false`)
+  // * `listOpts` _object_: query params to list objects, with the keys below:
+  //   * `listOpts.MaxKeys` _int_: maximum number of keys to return
+  //   * `listOpts.IncludeVersion` _bool_: true|false to include versions.
+  // __Return Value__
+  // * `stream` _Stream_: stream emitting the objects in the bucket; each object has the format:
+  //   * `obj.name` _string_: name of the object
+  //   * `obj.prefix` _string_: name of the object prefix
+  //   * `obj.size` _number_: size of the object
+  //   * `obj.etag` _string_: etag of the object
+  //   * `obj.lastModified` _Date_: modified time stamp
+  //   * `obj.isDeleteMarker` _boolean_: true if it is a delete marker
+
+  listObjects(
+    bucketName: string,
+    prefix: string,
+    recursive: boolean,
+    listOpts: {
+      MaxKeys?: number
+      IncludeVersion?: boolean
+    } = {},
+  ): BucketStream<S3ListObject> {
+    if (prefix === undefined) {
+      prefix = ''
+    }
+    if (recursive === undefined) {
+      recursive = false
+    }
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidPrefix(prefix)) {
+      throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`)
+    }
+    if (!isString(prefix)) {
+      throw new TypeError('prefix should be of type "string"')
+    }
+    if (!isBoolean(recursive)) {
+      throw new TypeError('recursive should be of type "boolean"')
+    }
+    if (!isObject(listOpts)) {
+      throw new TypeError('listOpts should be of type "object"')
+    }
+    const listQueryOpts = {
+      Delimiter: recursive ? '' : '/', // if recursive is false set delimiter to '/'
+      // honor listOpts.MaxKeys as documented above; listObjectsQuery caps it at 1000
+      MaxKeys: listOpts.MaxKeys ?? 1000,
+      IncludeVersion: listOpts.IncludeVersion,
+    }
+    let objects: S3ListObject[] = []
+    let ended = false
+    const readStream = new stream.Readable({ objectMode: true })
+
+    let marker = ''
+    // eslint-disable-next-line @typescript-eslint/no-misused-promises
+    readStream._read = async () => {
+      // push one object per _read()
+      if (objects.length) {
+        readStream.push(objects.shift())
+        return
+      }
+      if (ended) {
+        return readStream.push(null)
+      }
+
+      try {
+        const result = await this.listObjectsQuery(bucketName, prefix, marker, listQueryOpts)
+        // note: a `while (!ended)` loop here would spin forever on a truncated result;
+        // advance the marker once, hand off the batch, and recurse
+        if (result.isTruncated) {
+          marker = result.nextMarker || (result.versionIdMarker as string)
+        } else {
+          ended = true
+        }
+        objects = result.objects
+        // @ts-expect-error next read
+        readStream._read()
+      } catch (e) {
+        readStream.emit('error', e)
+      }
+    }
+
+    return readStream
+  }
+
+  // list a batch of objects
+  protected async listObjectsQuery(
+    bucketName: string,
+    prefix: string,
+    marker: string,
+    {
+      Delimiter,
+      MaxKeys,
+      IncludeVersion,
+    }: Partial<Pick<ListObjectV1Opt, 'IncludeVersion'>> & Required<Pick<ListObjectV1Opt, 'Delimiter' | 'MaxKeys'>>,
+  ) {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isString(prefix)) {
+      throw new TypeError('prefix should be of type "string"')
+    }
+    if (!isString(marker)) {
+      throw new TypeError('marker should be of type "string"')
+    }
+
+    if (!isString(Delimiter)) {
+      throw new TypeError('Delimiter should be of type "string"')
+    }
+    if (!isNumber(MaxKeys)) {
+      throw new TypeError('MaxKeys should be of type "number"')
+    }
+
+    const queries = []
+    // escape every value in query string, except maxKeys
+    queries.push(`prefix=${uriEscape(prefix)}`)
+    queries.push(`delimiter=${uriEscape(Delimiter)}`)
+    queries.push(`encoding-type=url`)
+
+    if (IncludeVersion) {
+      queries.push(`versions`)
+    }
+
+    if (marker) {
+      marker = uriEscape(marker)
+      if (IncludeVersion) {
+        queries.push(`key-marker=${marker}`)
+      } else {
+        queries.push(`marker=${marker}`)
+ } + } + + // no need to escape maxKeys + if (MaxKeys) { + if (MaxKeys >= 1000) { + MaxKeys = 1000 + } + queries.push(`max-keys=${MaxKeys}`) + } + queries.sort() + let query = '' + if (queries.length > 0) { + query = `${queries.join('&')}` + } + + const method = 'GET' + + const res = await this.makeRequestAsync({ method, bucketName, query }) + const body = await readAsBuffer(res) + + return xmlParsers.parseListObjects(body.toString()) + } + + putObjectRetention(bucketName: string, objectName: string, callback: NoResultCallback): void + putObjectRetention( + bucketName: string, + objectName: string, + retentionOptions: Retention, + callback: NoResultCallback, + ): void + putObjectRetention(bucketName: string, objectName: string, retentionOptions?: Retention): Promise + + putObjectRetention( + bucketName: string, + objectName: string, + retentionOptsOrCallback?: Retention | NoResultCallback, + callback?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + let retentionOpts: Retention = {} + let cb: undefined | NoResultCallback + if (isFunction(retentionOptsOrCallback)) { + cb = retentionOptsOrCallback + } else { + retentionOpts = retentionOptsOrCallback as Retention + cb = callback + } + + if (!isObject(retentionOpts)) { + throw new errors.InvalidArgumentError('retentionOpts should be of type "object"') + } else { + if (retentionOpts.governanceBypass && !isBoolean(retentionOpts.governanceBypass)) { + throw new errors.InvalidArgumentError(`Invalid value for governanceBypass: ${retentionOpts.governanceBypass}`) + } + if ( + retentionOpts.mode && + ![RETENTION_MODES.COMPLIANCE, RETENTION_MODES.GOVERNANCE].includes(retentionOpts.mode) + ) { + throw new errors.InvalidArgumentError(`Invalid object retention mode: ${retentionOpts.mode}`) + } + if (retentionOpts.retainUntilDate && !isString(retentionOpts.retainUntilDate)) { + throw new errors.InvalidArgumentError(`Invalid value for retainUntilDate: ${retentionOpts.retainUntilDate}`) + } + if (retentionOpts.versionId && !isString(retentionOpts.versionId)) { + throw new errors.InvalidArgumentError(`Invalid value for versionId: ${retentionOpts.versionId}`) + } + } + + const method = 'PUT' + let query = 'retention' + + const headers: RequestHeaders = {} + if (retentionOpts.governanceBypass) { + headers['X-Amz-Bypass-Governance-Retention'] = true + } + + const builder = new xml2js.Builder({ rootName: 'Retention', renderOpts: { pretty: false }, headless: true }) + const params: Record = {} + + if (retentionOpts.mode) { + params.Mode = retentionOpts.mode + } + if (retentionOpts.retainUntilDate) { + params.RetainUntilDate = retentionOpts.retainUntilDate + } + if (retentionOpts.versionId) { + query += `&versionId=${retentionOpts.versionId}` + } + + const payload = builder.buildObject(params) + + headers['Content-MD5'] = toMd5(payload) + + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit( + { + method, + bucketName, + objectName, + query, + headers, + }, + payload, + [200, 204], + ) + }) + } + + getBucketEncryption(bucketName: string, callback: ResultCallback): void + getBucketEncryption(bucketName: string): Promise + getBucketEncryption(bucketName: string, cb?: ResultCallback): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' 
+ bucketName) + } + if (!isOptionalFunction(cb)) { + throw new errors.InvalidArgumentError('callback should be of type "function"') + } + const method = 'GET' + const query = 'encryption' + + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, query }) + const body = await readAsBuffer(res) + return xmlParsers.parseBucketEncryptionConfig(body.toString()) + }) + } + + setBucketEncryption(bucketName: string, encryptionConfig: Encryption, callback: NoResultCallback): void + setBucketEncryption(bucketName: string, encryptionConfig: Encryption): Promise + setBucketEncryption( + bucketName: string, + encryptionConfigOrCallback: Encryption | NoResultCallback | undefined, + callback?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + + let encryptionConfig: Encryption | undefined + let cb: NoResultCallback | undefined + + if (isFunction(encryptionConfigOrCallback)) { + cb = encryptionConfigOrCallback + encryptionConfig = undefined + } else { + encryptionConfig = encryptionConfigOrCallback + cb = callback + } + + if (!isEmpty(encryptionConfig) && encryptionConfig.Rule.length > 1) { + throw new errors.InvalidArgumentError('Invalid Rule length. Only one rule is allowed: ' + encryptionConfig.Rule) + } + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + let encryptionObj = encryptionConfig + if (isEmpty(encryptionConfig)) { + encryptionObj = { + // Default MinIO Server Supported Rule + Rule: [ + { + ApplyServerSideEncryptionByDefault: { + SSEAlgorithm: 'AES256', + }, + }, + ], + } + } + + const method = 'PUT' + const query = 'encryption' + const builder = new xml2js.Builder({ + rootName: 'ServerSideEncryptionConfiguration', + renderOpts: { pretty: false }, + headless: true, + }) + const payload = builder.buildObject(encryptionObj) + + const headers: RequestHeaders = {} + headers['Content-MD5'] = toMd5(payload) + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit( + { + method, + bucketName, + query, + headers, + }, + payload, + ) + }) + } + + /** + * Remove the specified object. 
+ */ + removeObject(bucketName: string, objectName: string, removeOpts: RemoveOptions, callback: NoResultCallback): void + removeObject(bucketName: string, objectName: string, callback: NoResultCallback): void + removeObject(bucketName: string, objectName: string, removeOpts?: RemoveOptions): Promise + removeObject( + bucketName: string, + objectName: string, + removeOptsOrCallback: RemoveOptions | NoResultCallback = {}, + callback?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + let removeOpts: RemoveOptions = {} + let cb: NoResultCallback | undefined + + // backward compatibility + if (isFunction(removeOptsOrCallback)) { + cb = removeOptsOrCallback + } else { + removeOpts = removeOptsOrCallback + cb = callback + } + + if (!isObject(removeOpts)) { + throw new errors.InvalidArgumentError('removeOpts should be of type "object"') + } + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + const method = 'DELETE' + const queryParams: Record = {} + + if (removeOpts.versionId) { + queryParams.versionId = `${removeOpts.versionId}` + } + const headers: RequestHeaders = {} + if (removeOpts.governanceBypass) { + headers['X-Amz-Bypass-Governance-Retention'] = true + } + if (removeOpts.forceDelete) { + headers['x-minio-force-delete'] = true + } + + const query = qs(queryParams) + + const requestOptions: RequestOption = { method, bucketName, objectName, headers } + if (query) { + requestOptions['query'] = query + } + + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit(requestOptions, '', [200, 204]) + }) + } + + /** + * Generate a generic pre-signed URL which can be used for HTTP methods GET, PUT, HEAD and DELETE + * + * @param httpMethod - name of the HTTP method + * @param bucketName - name of the bucket + * @param objectName - name of the object + * @param expires - expiry in seconds (optional, default 7 days) + * @param reqParams - request parameters (optional) e.g {versionId:"10fa9946-3f64-4137-a58f-888065c0732e"} + * @param requestDate - A date object, the url will be issued at (optional) + */ + presignedUrl( + httpMethod: string, + bucketName: string, + objectName: string, + expires?: number, + reqParams?: Record, + requestDate?: Date, + ): Promise + + presignedUrl(httpMethod: string, bucketName: string, objectName: string, callback: ResultCallback): void + presignedUrl( + httpMethod: string, + bucketName: string, + objectName: string, + expiry: number, + callback: ResultCallback, + ): void + presignedUrl( + httpMethod: string, + bucketName: string, + objectName: string, + expiry: number, + reqParams: Record, + callback: ResultCallback, + ): void + presignedUrl( + httpMethod: string, + bucketName: string, + objectName: string, + expiry: number, + reqParams: Record, + requestDate: Date, + callback: ResultCallback, + ): void + + presignedUrl( + method: 'GET' | 'DELETE' | 'PUT' | 'POST', + bucketName: string, + objectName: string, + // expires?: number, + // reqParams?: Record, + // requestDate?: Date, + // callback?: ResultCallback, + ...originalArgs: unknown[] + ): void | Promise { + if (this.anonymous) { + throw new errors.AnonymousRequestError('Presigned ' + method + ' url cannot be generated for anonymous requests') + } + + let [[expires, reqParams, requestDate], cb] = findCallback< + 
[number, Record, Date], + ResultCallback + >(originalArgs) + + expires = expires ?? 24 * 60 * 60 * 7 // 7 days in seconds + reqParams = reqParams ?? {} + requestDate = requestDate ?? new Date() + + if (!isNumber(expires)) { + throw new TypeError(`expires should be of type "number", got ${expires}`) + } + if (!isObject(reqParams)) { + throw new TypeError(`reqParams should be of type "object", got ${reqParams}`) + } + if (!isValidDate(requestDate)) { + throw new TypeError(`requestDate should be of type "Date" and valid, got ${requestDate}`) + } + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + const query = qs(reqParams) + return asCallbackFn(cb, async () => { + const region = await this.getBucketRegionAsync(bucketName) + + const reqOptions = this.getRequestOptions({ method, region, bucketName, objectName, query }) + void this.checkAndRefreshCreds() + return presignSignatureV4( + reqOptions, + this.accessKey, + this.secretKey, + this.sessionToken!, + region, + requestDate, + expires, + ) + }) + } + + /** + * Generate a presigned URL for GET + * + * @param bucketName - name of the bucket + * @param objectName - name of the object + * @param expires - expiry in seconds (optional, default 7 days) + * @param respHeaders - response headers to override or request params for query (optional) e.g {versionId:"10fa9946-3f64-4137-a58f-888065c0732e"} + * @param requestDate - A date object, the url will be issued at (optional) + */ + presignedGetObject( + bucketName: string, + objectName: string, + expires?: number, + respHeaders?: Record, + requestDate?: Date, + ): Promise + + presignedGetObject(bucketName: string, objectName: string, callback: ResultCallback): void + presignedGetObject(bucketName: string, objectName: string, expires: number, callback: ResultCallback): void + presignedGetObject( + bucketName: string, + objectName: string, + expires: number, + respHeaders: Record, + callback: ResultCallback, + ): void + presignedGetObject( + bucketName: string, + objectName: string, + expires: number, + respHeaders: Record, + requestDate: Date, + callback: ResultCallback, + ): void + + presignedGetObject( + bucketName: string, + objectName: string, + expires?: unknown, + respHeaders?: unknown, + requestDate?: unknown, + cb?: unknown, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + + if (isFunction(respHeaders)) { + cb = respHeaders + respHeaders = {} + requestDate = new Date() + } + + const validRespHeaders = [ + 'response-content-type', + 'response-content-language', + 'response-expires', + 'response-cache-control', + 'response-content-disposition', + 'response-content-encoding', + ] + validRespHeaders.forEach((header) => { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + if (respHeaders !== undefined && respHeaders[header] !== undefined && !isString(respHeaders[header])) { + throw new TypeError(`response header ${header} should be of type "string"`) + } + }) + + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore presignedUrl will check type values, just leave it here for future refactor. 
+ return this.presignedUrl('GET', bucketName, objectName, expires as number, respHeaders, requestDate as Date, cb) + } + + presignedPutObject(bucketName: string, objectName: string, callback: ResultCallback): void + presignedPutObject(bucketName: string, objectName: string, expiry: number, callback: ResultCallback): void + presignedPutObject(bucketName: string, objectName: string, expiry?: number): Promise + + // * `expiry` _number_: expiry in seconds (optional, default 7 days) + presignedPutObject( + bucketName: string, + objectName: string, + expires?: number | ResultCallback, + cb?: ResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`) + } + if (!isValidObjectName(objectName)) { + throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`) + } + return this.presignedUrl('PUT', bucketName, objectName, expires as number, cb) + } + + presignedPostPolicy(policy: PostPolicy, callback: ResultCallback): void + presignedPostPolicy(policy: PostPolicy): Promise + presignedPostPolicy(postPolicy: PostPolicy, cb?: ResultCallback): void | Promise { + return asCallbackFn(cb, async () => { + if (this.anonymous) { + throw new errors.AnonymousRequestError('Presigned POST policy cannot be generated for anonymous requests') + } + if (!isObject(postPolicy)) { + throw new TypeError('postPolicy should be of type "object"') + } + if (!isOptionalFunction(cb)) { + throw new TypeError('cb should be of type "function"') + } + // @ts-expect-error index check + const region = await this.getBucketRegionAsync(postPolicy.formData.bucket) + const date = new Date() + const dateStr = makeDateLong(date) + void this.checkAndRefreshCreds() + + if (!postPolicy.policy.expiration) { + // 'expiration' is mandatory field for S3. + // Set default expiration date of 7 days. + const expires = new Date() + expires.setSeconds(24 * 60 * 60 * 7) + postPolicy.setExpires(expires) + } + + postPolicy.policy.conditions.push(['eq', '$x-amz-date', dateStr]) + postPolicy.formData['x-amz-date'] = dateStr + + postPolicy.policy.conditions.push(['eq', '$x-amz-algorithm', 'AWS4-HMAC-SHA256']) + postPolicy.formData['x-amz-algorithm'] = 'AWS4-HMAC-SHA256' + + postPolicy.policy.conditions.push(['eq', '$x-amz-credential', this.accessKey + '/' + getScope(region, date)]) + postPolicy.formData['x-amz-credential'] = this.accessKey + '/' + getScope(region, date) + + if (this.sessionToken) { + postPolicy.policy.conditions.push(['eq', '$x-amz-security-token', this.sessionToken]) + postPolicy.formData['x-amz-security-token'] = this.sessionToken + } + + const policyBase64 = Buffer.from(JSON.stringify(postPolicy.policy)).toString('base64') + + postPolicy.formData.policy = policyBase64 + + postPolicy.formData['x-amz-signature'] = postPresignSignatureV4(region, date, this.secretKey, policyBase64) + const opts: RequestOption = { method: 'POST', region: region, bucketName: postPolicy.formData.bucket } + const reqOptions = this.getRequestOptions(opts) + const portStr = this.port == 80 || this.port === 443 ? 
'' : `:${this.port.toString()}`
+      const urlStr = `${reqOptions.protocol}//${reqOptions.host}${portStr}${reqOptions.path}`
+      return { postURL: urlStr, formData: postPolicy.formData }
+    })
+  }
+
+  setObjectTagging(bucketName: string, objectName: string, tags: TagList, callback: NoResultCallback): void
+  setObjectTagging(
+    bucketName: string,
+    objectName: string,
+    tags: TagList,
+    putOptions: VersionIdentification,
+    callback: NoResultCallback,
+  ): void
+  setObjectTagging(
+    bucketName: string,
+    objectName: string,
+    tags: TagList,
+    putOptions?: VersionIdentification,
+  ): Promise<void>
+
+  /** Set tags on an object
+   * __Arguments__
+   * bucketName _string_
+   * objectName _string_
+   * tags _object_ of the form {'<tag-key-1>':'<tag-value-1>','<tag-key-2>':'<tag-value-2>'}
+   * putOpts _object_ (Optional) e.g {versionId:"my-object-version-id"},
+   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
+   */
+  setObjectTagging(
+    bucketName: string,
+    objectName: string,
+    tagsArg: TagList,
+    putOptsArg?: VersionIdentification | NoResultCallback,
+    cbArg?: NoResultCallback,
+  ): void | Promise<void> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError('Invalid object name: ' + objectName)
+    }
+
+    let [[tags, putOpts], cb] = findCallback<[TagList, VersionIdentification?], NoResultCallback>([
+      tagsArg,
+      putOptsArg,
+      cbArg,
+    ])
+    putOpts = putOpts ?? {}
+
+    if (!isObject(tags)) {
+      throw new errors.InvalidArgumentError('tags should be of type "object"')
+    }
+    if (Object.keys(tags).length > 10) {
+      throw new errors.InvalidArgumentError('Maximum tags allowed is 10')
+    }
+
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+
+    return asCallback(cb, this.setTagging({ bucketName, objectName, tags, putOpts }))
+  }
+
+  /** To set tags on a bucket or object, based on the params
+   * __Arguments__
+   * taggingParams _object_ which contains the following properties
+   *  bucketName _string_,
+   *  objectName _string_ (Optional),
+   *  tags _object_ of the form {'<tag-key-1>':'<tag-value-1>','<tag-key-2>':'<tag-value-2>'}
+   *  putOpts _object_ (Optional) e.g {versionId:"my-object-version-id"},
+   *  `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
+   */
+  async setTagging({
+    bucketName,
+    objectName,
+    putOpts = {},
+    tags,
+  }: {
+    tags: TagList
+    putOpts?: VersionIdentification
+    bucketName: string
+    objectName?: string
+  }): Promise<void> {
+    const method = 'PUT'
+    let query = 'tagging'
+
+    if (putOpts && putOpts.versionId) {
+      query = `${query}&versionId=${putOpts.versionId}`
+    }
+    const tagsList = []
+    for (const [key, value] of Object.entries(tags)) {
+      tagsList.push({ Key: key, Value: value })
+    }
+    const taggingConfig = {
+      Tagging: {
+        TagSet: {
+          Tag: tagsList,
+        },
+      },
+    }
+    const encoder = new TextEncoder()
+    const headers: RequestHeaders = {}
+    const builder = new xml2js.Builder({ headless: true, renderOpts: { pretty: false } })
+    const payload = encoder.encode(builder.buildObject(taggingConfig))
+    headers['Content-MD5'] = toMd5(payload)
+    const requestOptions: RequestOption = { method, bucketName, query, headers }
+
+    if (objectName) {
+      requestOptions['objectName'] = objectName
+    }
+
+    await this.makeRequestAsyncOmit(requestOptions, payload)
+  }
+
+  removeObjectTagging(bucketName: string, objectName: string, callback: NoResultCallback): void
+  removeObjectTagging(
+    bucketName: string,
+    objectName: string,
+    removeOptions: VersionIdentification,
+    callback: NoResultCallback,
+  ): void
+  removeObjectTagging(bucketName: string, objectName: string, removeOptions?: VersionIdentification): Promise<void>
+
+  /** Remove tags associated with an object
+   * __Arguments__
+   * bucketName _string_
+   * objectName _string_
+   * removeOpts _object_ (Optional) e.g. {versionId:"my-object-version-id"}
+   * `cb(error)` _function_ - callback function with `err` as the error argument. `err` is null if the operation is successful.
+   */
+  removeObjectTagging(
+    bucketName: string,
+    objectName: string,
+    removeOptsArg?: VersionIdentification | NoResultCallback,
+    cbArg?: NoResultCallback,
+  ): Promise<void> | void {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError('Invalid object name: ' + objectName)
+    }
+
+    const [[removeOpts], cb] = findCallback<[VersionIdentification?], NoResultCallback>([removeOptsArg, cbArg])
+    if (removeOpts && Object.keys(removeOpts).length && !isObject(removeOpts)) {
+      throw new errors.InvalidArgumentError('removeOpts should be of type "object"')
+    }
+
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+
+    return asCallback(cb, this.removeTagging({ bucketName, objectName, removeOpts }))
+  }
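
Object-level tagging goes through the same `setTagging`/`removeTagging` internals as the bucket variants. A sketch with placeholder names, targeting one specific object version:

```ts
import { TypedClient } from './typed-client.ts'

declare const client: TypedClient // placeholder instance

async function objectTagDemo() {
  const opts = { versionId: 'my-object-version-id' } // placeholder version

  await client.setObjectTagging('my-bucket', 'report.csv', { reviewed: 'yes' }, opts)
  const tags = await client.getObjectTagging('my-bucket', 'report.csv', opts)
  console.log(tags)
  await client.removeObjectTagging('my-bucket', 'report.csv', opts)
}
```
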
+
+  selectObjectContent(
+    bucketName: string,
+    objectName: string,
+    selectOpts: SelectOptions,
+    callback: ResultCallback<SelectResults>,
+  ): void
+  selectObjectContent(bucketName: string, objectName: string, selectOpts: SelectOptions): Promise<SelectResults>
+
+  selectObjectContent(
+    bucketName: string,
+    objectName: string,
+    selectOpts: SelectOptions,
+    cb?: ResultCallback<SelectResults>,
+  ): void | Promise<SelectResults> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+    if (!isEmpty(selectOpts)) {
+      if (!isString(selectOpts.expression)) {
+        throw new TypeError('sqlExpression should be of type "string"')
+      }
+      if (!isEmpty(selectOpts.inputSerialization)) {
+        if (!isObject(selectOpts.inputSerialization)) {
+          throw new TypeError('inputSerialization should be of type "object"')
+        }
+      } else {
+        throw new TypeError('inputSerialization is required')
+      }
+      if (!isEmpty(selectOpts.outputSerialization)) {
+        if (!isObject(selectOpts.outputSerialization)) {
+          throw new TypeError('outputSerialization should be of type "object"')
+        }
+      } else {
+        throw new TypeError('outputSerialization is required')
+      }
+    } else {
+      throw new TypeError('valid select configuration is required')
+    }
+
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+
+    const method = 'POST'
+    let query = `select`
+    query += '&select-type=2'
+
+    const config: unknown[] = [
+      {
+        Expression: selectOpts.expression,
+      },
+      {
+        ExpressionType: selectOpts.expressionType || 'SQL',
+      },
+      {
+        InputSerialization: [selectOpts.inputSerialization],
+      },
+      {
+        OutputSerialization: [selectOpts.outputSerialization],
+      },
+    ]
+
+    // Optional
+    if (selectOpts.requestProgress) {
+      config.push({ RequestProgress: selectOpts.requestProgress })
+    }
+    // Optional
+    if (selectOpts.scanRange) {
+      config.push({ ScanRange: selectOpts.scanRange })
+    }
+
+    const builder = new xml2js.Builder({
+      rootName: 'SelectObjectContentRequest',
+      renderOpts: { pretty: false },
+      headless: true,
+    })
+    const payload = builder.buildObject(config)
+
+    return asCallbackFn(cb, async () => {
+      const res = await this.makeRequestAsync({ method, bucketName, objectName, query }, payload)
+      return parseSelectObjectContentResponse(await readAsBuffer(res))
+    })
+  }
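
Putting the select types and the method together, a hedged end-to-end sketch; the bucket, object and SQL text are placeholders, and the return value is whatever `parseSelectObjectContentResponse` yields as `SelectResults`:

```ts
import { TypedClient } from './typed-client.ts'

declare const client: TypedClient // placeholder instance

async function selectDemo() {
  const results = await client.selectObjectContent('my-bucket', 'scores.csv', {
    expression: 'SELECT * FROM S3Object s LIMIT 10',
    inputSerialization: { CompressionType: 'NONE', CSV: { FileHeaderInfo: 'USE' } },
    outputSerialization: { CSV: {} },
  })
  console.log(results)
}
```
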
+
+  getObjectRetention(
+    bucketName: string,
+    objectName: string,
+    options: VersionIdentification,
+    callback: ResultCallback<Retention>,
+  ): void
+  getObjectRetention(bucketName: string, objectName: string, options: VersionIdentification): Promise<Retention>
+
+  getObjectRetention(
+    bucketName: string,
+    objectName: string,
+    getOpts: VersionIdentification,
+    cb?: ResultCallback<Retention>,
+  ): Promise<Retention> | void {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+    if (!isObject(getOpts)) {
+      throw new errors.InvalidArgumentError('getOpts should be of type "object"')
+    } else if (getOpts.versionId && !isString(getOpts.versionId)) {
+      throw new errors.InvalidArgumentError('versionId should be of type "string"')
+    }
+    if (cb && !isFunction(cb)) {
+      throw new errors.InvalidArgumentError('callback should be of type "function"')
+    }
+    const method = 'GET'
+    let query = 'retention'
+    if (getOpts.versionId) {
+      query += `&versionId=${getOpts.versionId}`
+    }
+
+    return asCallbackFn(cb, async (): Promise<Retention> => {
+      const res = await this.makeRequestAsync({ method, bucketName, objectName, query })
+      const body = await readAsBuffer(res)
+      return xmlParsers.parseObjectRetentionConfig(body.toString())
+    })
+  }
+
+  getObjectTagging(bucketName: string, objectName: string, callback: ResultCallback<Tag[]>): void
+  getObjectTagging(
+    bucketName: string,
+    objectName: string,
+    getOptions: VersionIdentification,
+    callback: ResultCallback<Tag[]>,
+  ): void
+  getObjectTagging(bucketName: string, objectName: string, getOptions?: VersionIdentification): Promise<Tag[]>
+
+  getObjectTagging(
+    bucketName: string,
+    objectName: string,
+    getOptsArg?: VersionIdentification | ResultCallback<Tag[]>,
+    cbArg?: ResultCallback<Tag[]>,
+  ): void | Promise<Tag[]> {
+    const method = 'GET'
+    let query = 'tagging'
+
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError('Invalid object name: ' + objectName)
+    }
+
+    const [[getOpts = {}], cb] = findCallback<[VersionIdentification | undefined], ResultCallback<Tag[]>>([
+      getOptsArg,
+      cbArg,
+    ])
+
+    if (!isObject(getOpts)) {
+      throw new errors.InvalidArgumentError('getOpts should be of type "object"')
+    }
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+
+    if (getOpts && getOpts.versionId) {
+      query = `${query}&versionId=${getOpts.versionId}`
+    }
+    const requestOptions: RequestOption = { method, bucketName, objectName, query }
+
+    return asCallbackFn(cb, async () => {
+      const res = await this.makeRequestAsync(requestOptions)
+      const body = await readAsBuffer(res)
+      return xmlParsers.parseTagging(body.toString())
+    })
+  }
+
+  getObjectLegalHold(bucketName: string, objectName: string, callback: ResultCallback): void
+  getObjectLegalHold(
+    bucketName: string,
+    objectName: string,
+    getOptions: VersionIdentification,
+    callback: ResultCallback,
+  ): void
+  getObjectLegalHold(
+    bucketName: string,
+    objectName: string,
+    getOptions?: VersionIdentification,
+  ): Promise
+
+  getObjectLegalHold(
+    bucketName: string,
+    objectName: string,
+    getOptsArg?: VersionIdentification | ResultCallback,
+    cbArg?: ResultCallback,
+  ): void | Promise {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+
+    const [[getOpts = {}], cb] = findCallback<[VersionIdentification], ResultCallback>([getOptsArg, cbArg])
+
+    if (!isObject(getOpts)) {
+      throw new TypeError('getOpts should be of type "Object"')
+    } else if (Object.keys(getOpts).length > 0 && getOpts.versionId && !isString(getOpts.versionId)) {
+      throw new TypeError('versionId should be of type string: ' + getOpts.versionId)
+    }
+
+    if (!isOptionalFunction(cb)) {
+      throw new errors.InvalidArgumentError('callback should be of type "function"')
+    }
+
+    const method = 'GET'
+    let query = 'legal-hold'
+
+    if (getOpts.versionId) {
+      query += `&versionId=${getOpts.versionId}`
+    }
+
+    return asCallbackFn(cb, async () => {
+      const res = await this.makeRequestAsync({ method, bucketName, objectName, query })
+      const body = await readAsBuffer(res)
+      return xmlParsers.parseObjectLegalHoldConfig(body.toString())
+    })
+  }
+
+  setObjectLegalHold(bucketName: string, objectName: string, callback: NoResultCallback): void
+  setObjectLegalHold(
+    bucketName: string,
+    objectName: string,
+    setOptions: LegalHoldOptions,
+    callback: NoResultCallback,
+  ): void
+  setObjectLegalHold(bucketName: string, objectName: string, setOptions?: LegalHoldOptions): Promise<void>
+
+  setObjectLegalHold(
+    bucketName: string,
+    objectName: string,
+    setOptions?: LegalHoldOptions | NoResultCallback,
+    callback?: NoResultCallback,
+  ): void | Promise<void> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+
+    const defaultOpts: LegalHoldOptions = {
+      status: LEGAL_HOLD_STATUS.ENABLED,
+    }
+
+    let [[setOpts = defaultOpts], cb] = findCallback<[LegalHoldOptions], NoResultCallback>([setOptions, callback])
+
+    if (!isObject(setOpts)) {
+      throw new TypeError('setOpts 
should be of type "Object"') + } else { + if (![LEGAL_HOLD_STATUS.ENABLED, LEGAL_HOLD_STATUS.DISABLED].includes(setOpts.status)) { + throw new TypeError('Invalid status: ' + setOpts.status) + } + if (setOpts.versionId && !setOpts.versionId.length) { + throw new TypeError('versionId should be of type string.:' + setOpts.versionId) + } + } + + if (!isOptionalFunction(cb)) { + throw new errors.InvalidArgumentError('callback should be of type "function"') + } + + if (isEmpty(setOpts)) { + setOpts = defaultOpts + } + + const method = 'PUT' + let query = 'legal-hold' + + if (setOpts.versionId) { + query += `&versionId=${setOpts.versionId}` + } + + const config = { + Status: setOpts.status, + } + + const builder = new xml2js.Builder({ rootName: 'LegalHold', renderOpts: { pretty: false }, headless: true }) + const payload = builder.buildObject(config) + const headers = { + 'Content-MD5': toMd5(payload), + } + + return asCallbackFn(cb, async () => { + await this.makeRequestAsyncOmit( + { + method, + bucketName, + objectName, + query, + headers, + }, + payload, + ) + }) + } + + /** + * Internal Method to abort a multipart upload request in case of any errors. + * @param bucketName __string__ Bucket Name + * @param objectName __string__ Object Name + * @param uploadId __string__ id of a multipart upload to cancel during compose object sequence. + */ + protected async abortMultipartUpload(bucketName: string, objectName: string, uploadId: string) { + // TODO: type callback + const method = 'DELETE' + const query = `uploadId=${uploadId}` + + const requestOptions: RequestOption = { method, bucketName, objectName: objectName, query } + await this.makeRequestAsyncOmit(requestOptions, '', [204]) + } + + removeObjects( + bucketName: string, + objectsList: Array< + | string + | { + name: string + versionId?: string + } + >, + callback: NoResultCallback, + ): void + removeObjects( + bucketName: string, + objectsList: Array< + | string + | { + name: string + versionId?: string + } + >, + ): Promise + + removeObjects( + bucketName: string, + objectsList: Array< + | string + | { + name: string + versionId?: string + } + >, + cb?: NoResultCallback, + ): void | Promise { + if (!isValidBucketName(bucketName)) { + throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName) + } + if (!isArray(objectsList)) { + throw new errors.InvalidArgumentError('objectsList should be a list') + } + if (!isOptionalFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + const maxEntries = 1000 + const query = 'delete' + const method = 'POST' + + type O = + | string + | { + name: string + versionId?: string + } + + const result = objectsList.reduce( + (result, entry) => { + result.list.push(entry) + if (result.list.length === maxEntries) { + result.listOfList.push(result.list) + result.list = [] + } + return result + }, + { listOfList: [] as O[][], list: [] as O[] }, + ) + + if (result.list.length > 0) { + result.listOfList.push(result.list) + } + + return asCallbackFn(cb, async () => { + for (const list of result.listOfList) { + const objects: { Key: string; VersionId?: string }[] = [] + list.forEach(function (value) { + if (typeof value === 'string') { + objects.push({ Key: value }) + } else { + objects.push({ Key: value.name, VersionId: value.versionId }) + } + }) + const deleteObjects = { Delete: { Quiet: true, Object: objects } } + const builder = new xml2js.Builder({ headless: true }) + const payload = new TextEncoder().encode(builder.buildObject(deleteObjects)) + const headers = 
{
+          ['Content-MD5']: toMd5(payload),
+        }
+
+        await this.makeRequestAsyncOmit(
+          {
+            method,
+            bucketName,
+            query,
+            headers,
+          },
+          payload,
+        )
+      }
+    })
+  }
+}
+
+export class Helper {
+  constructor(private readonly client: Client) {}
+
+  async MultipleFileUpload(
+    bucketName: string,
+    objectName: string,
+    filePath: string,
+    metaData: MetaData = {},
+  ): Promise<UploadedObjectInfo> {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+
+    if (!isString(filePath)) {
+      throw new TypeError('filePath should be of type "string"')
+    }
+
+    if (!isObject(metaData)) {
+      throw new TypeError('metaData should be of type "object"')
+    }
+
+    // Inserts correct `content-type` attribute based on metaData and filePath
+    metaData = insertContentType(metaData, filePath)
+
+    // Updates metaData to have the correct prefix if needed
+    metaData = prependXAMZMeta(metaData)
+    type Part = {
+      part: number
+      etag: string
+    }
+
+    const executor = async (fd: number) => {
+      const stats = await fsp.fstat(fd)
+      const fileSize = stats.size
+      if (fileSize > this.client.maxObjectSize) {
+        throw new Error(`${filePath} size: ${stats.size}, max allowed size: 5TB`)
+      }
+
+      if (fileSize <= this.client.partSize) {
+        // simple PUT request, no multipart
+        const uploader = this.client.getUploader(bucketName, objectName, metaData, false)
+        const buf = await fsp.readfile(fd)
+        const { md5sum, sha256sum } = transformers.hashBinary(buf, this.client.enableSHA256)
+        return await uploader(buf, fileSize, sha256sum, md5sum)
+      }
+
+      const previousUploadId = await this.client.findUploadId(bucketName, objectName)
+      let eTags: Part[] = []
+      // if there was a previous incomplete upload, fetch all its uploaded parts info
+      let uploadId: string
+      if (previousUploadId) {
+        eTags = await this.client.listParts(bucketName, objectName, previousUploadId)
+        uploadId = previousUploadId
+      } else {
+        // there was no previous upload, initiate a new one
+        uploadId = await this.client.initiateNewMultipartUpload(bucketName, objectName, metaData)
+      }
+
+      {
+        const partSize = this.client.calculatePartSize(fileSize)
+        const uploader = this.client.getUploader(bucketName, objectName, metaData, true)
+        // convert array to object to make things easy
+        const parts = eTags.reduce(function (acc, item) {
+          if (!acc[item.part]) {
+            acc[item.part] = item
+          }
+          return acc
+        }, {} as Record<number, Part>)
+        const partsDone: { part: number; etag: string }[] = []
+        let partNumber = 1
+        let uploadedSize = 0
+
+        // will be reused for hashing and uploading
+        // don't worry it's "unsafe", we will read data from fs to fill it
+        const buf = Buffer.allocUnsafe(this.client.partSize)
+        while (uploadedSize < fileSize) {
+          const part = parts[partNumber]
+          let length = partSize
+          if (length > fileSize - uploadedSize) {
+            length = fileSize - uploadedSize
+          }
+
+          // read this part from its own offset; a fixed position of 0 would re-read
+          // the start of the file for every part
+          await fsp.read(fd, buf, 0, length, uploadedSize)
+          const { md5sum, sha256sum } = transformers.hashBinary(buf.subarray(0, length), this.client.enableSHA256)
+
+          const md5sumHex = Buffer.from(md5sum, 'base64').toString('hex')
+
+          if (part && md5sumHex === part.etag) {
+            // md5 matches, chunk already uploaded
+            partsDone.push({ part: partNumber, etag: part.etag })
+            partNumber++
+            uploadedSize += length
+            continue
+          }
+
+          const objInfo = await uploader(uploadId, partNumber, buf.subarray(0, length), length, sha256sum, md5sum)
+          partsDone.push({ part: partNumber, etag: objInfo.etag })
+          partNumber++
+          uploadedSize += length
+        }
+        eTags = partsDone
+      }
+
+      // at last, finish uploading
+      return this.client.completeMultipartUpload(bucketName, objectName, uploadId, eTags)
+    }
+
+    const ensureFileClose = async (executor: (fd: number) => Promise<UploadedObjectInfo>) => {
+      let fd
+      try {
+        fd = await fsp.open(filePath, 'r')
+      } catch (e) {
+        throw new Error(`failed to open file ${filePath}: err ${e}`, { cause: e })
+      }
+
+      try {
+        // make sure to keep await, otherwise file will be closed early.
+        return await executor(fd)
+      } finally {
+        await fsp.fclose(fd)
+      }
+    }
+
+    return ensureFileClose(executor)
+  }
+}
diff --git a/src/typed-client2.ts b/src/typed-client2.ts
new file mode 100644
index 00000000..dceb2734
--- /dev/null
+++ b/src/typed-client2.ts
@@ -0,0 +1,941 @@
+import * as stream from 'node:stream'
+
+import async from 'async'
+import _ from 'lodash'
+import xml2js from 'xml2js'
+
+import { asCallback, asCallbackFn } from './as-callback.ts'
+import { fsp } from './async.ts'
+import type { RequestOption } from './client.ts'
+import { findCallback, uploadStream } from './client.ts'
+import { CopyConditions } from './copyConditions.ts'
+import * as errors from './errors.ts'
+import type { MetaData } from './helpers.ts'
+import {
+  calculateEvenSplits,
+  CopyDestinationOptions,
+  CopySourceOptions,
+  extractMetadata,
+  getSourceVersionId,
+  getVersionId,
+  isArray,
+  isBoolean,
+  isEmpty,
+  isFunction,
+  isNumber,
+  isObject,
+  isOptionalFunction,
+  isReadableStream,
+  isString,
+  isValidBucketName,
+  isValidObjectName,
+  isValidPrefix,
+  PART_CONSTRAINTS,
+  partsRequired,
+  pipesetup,
+  prependXAMZMeta,
+  readableStream,
+  RETENTION_MODES,
+  RETENTION_VALIDITY_UNITS,
+  sanitizeETag,
+  toMd5,
+  uriEscape,
+  uriResourceEscape,
+} from './helpers.ts'
+import type { NotificationEvent } from './notification.ts'
+import { NotificationConfig, NotificationPoller } from './notification.ts'
+import { readAsBuffer } from './response.ts'
+import * as transformers from './transformers.ts'
+import type {
+  BucketItemCopy,
+  NoResultCallback,
+  RequestHeaders,
+  ResultCallback,
+  SourceObjectStats,
+  UploadedObjectInfo,
+} from './type.ts'
+import { TypedClient } from './typed-client.ts'
+import type { ObjectLockConfig, S3ListObject } from './xml-parsers.ts'
+import * as xmlParsers from './xml-parsers.ts'
+
+type PartConfig = {
+  bucketName: string
+  objectName: string
+  uploadID: string
+  partNumber: number
+  headers: RequestHeaders
+}
+
+export class TypedClient2 extends TypedClient {
+  // * `callback(err, {etag, lastModified})` _function_: non null `err` indicates error, `etag` _string_ and `lastModified` _Date_ are respectively the etag and the last modified date of the newly copied object
+  protected copyObjectV1(
+    bucketName: string,
+    objectName: string,
+    srcObject: string,
+    arg4: unknown,
+    arg5: unknown,
+  ): Promise<BucketItemCopy> | void {
+    const [[conditions = null], cb] = findCallback<[CopyConditions | null], ResultCallback<BucketItemCopy>>([
+      arg4,
+      arg5,
+    ])
+
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+    if (!isString(srcObject)) {
+      throw new TypeError('srcObject should be of type "string"')
+    }
+    if (srcObject === '') {
+      throw new errors.InvalidPrefixError(`Empty source prefix`)
+    }
+
+    if (conditions !== null && !(conditions instanceof CopyConditions)) {
+      throw new TypeError('conditions should be of type "CopyConditions"')
"CopyConditions"') + } + + const headers: RequestHeaders = {} + headers['x-amz-copy-source'] = uriResourceEscape(srcObject) + + if (conditions !== null) { + if (conditions.modified !== '') { + headers['x-amz-copy-source-if-modified-since'] = conditions.modified + } + if (conditions.unmodified !== '') { + headers['x-amz-copy-source-if-unmodified-since'] = conditions.unmodified + } + if (conditions.matchETag !== '') { + headers['x-amz-copy-source-if-match'] = conditions.matchETag + } + if (conditions.matchETagExcept !== '') { + headers['x-amz-copy-source-if-none-match'] = conditions.matchETagExcept + } + } + + const method = 'PUT' + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, objectName, headers }) + const body = await readAsBuffer(res) + return xmlParsers.parseCopyObject(body.toString()) + }) + } + + /** + * Internal Method to perform copy of an object. + * @param sourceConfig __object__ instance of CopySourceOptions @link ./helpers/CopySourceOptions + * @param destConfig __object__ instance of CopyDestinationOptions @link ./helpers/CopyDestinationOptions + * @param cb __function__ called with null if there is an error + * @returns Promise if no callack is passed. + */ + protected copyObjectV2( + sourceConfig: CopySourceOptions, + destConfig: CopyDestinationOptions, + cb?: ResultCallback, + ): Promise | void | false { + if (!(sourceConfig instanceof CopySourceOptions)) { + throw new errors.InvalidArgumentError('sourceConfig should of type CopySourceOptions ') + } + if (!(destConfig instanceof CopyDestinationOptions)) { + throw new errors.InvalidArgumentError('destConfig should of type CopyDestinationOptions ') + } + if (!destConfig.validate()) { + return false + } + if (!destConfig.validate()) { + return false + } + if (!isFunction(cb)) { + throw new TypeError('callback should be of type "function"') + } + + const headers = Object.assign({}, sourceConfig.getHeaders(), destConfig.getHeaders()) + + const bucketName = destConfig.Bucket + const objectName = destConfig.Object + + const method = 'PUT' + return asCallbackFn(cb, async () => { + const res = await this.makeRequestAsync({ method, bucketName, objectName, headers }) + const body = await readAsBuffer(res) + const data = xmlParsers.parseCopyObject(body.toString()) + + const resHeaders = res.headers + + return { + Bucket: destConfig.Bucket, + Key: destConfig.Object, + LastModified: data.lastModified, + lastModified: data.lastModified, + MetaData: extractMetadata(resHeaders), + VersionId: getVersionId(resHeaders), + SourceVersionId: getSourceVersionId(resHeaders), + Etag: sanitizeETag(resHeaders.etag), + etag: sanitizeETag(resHeaders.etag), + Size: parseInt(resHeaders['content-length']!), + } as BucketItemCopy + }) + } + + copyObject( + bucketName: string, + objectName: string, + sourceObject: string, + conditions: CopyConditions, + callback: ResultCallback, + ): void + copyObject( + bucketName: string, + objectName: string, + sourceObject: string, + conditions: CopyConditions, + ): Promise + + // Backward compatibility for Copy Object API. 
+  // Backward compatibility for Copy Object API.
+  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+  // @ts-ignore
+  copyObject(...allArgs): Promise<BucketItemCopy> | void | false {
+    if (allArgs[0] instanceof CopySourceOptions && allArgs[1] instanceof CopyDestinationOptions) {
+      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+      // @ts-ignore
+      return this.copyObjectV2(...allArgs)
+    }
+    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+    // @ts-ignore
+    return this.copyObjectV1(...allArgs)
+  }
+
+  async uploadPartCopy(partConfig: PartConfig) {
+    const { bucketName, objectName, uploadID, partNumber, headers } = partConfig
+
+    const method = 'PUT'
+    const query = `uploadId=${uploadID}&partNumber=${partNumber}`
+    const requestOptions: RequestOption = { method, bucketName, objectName: objectName, query, headers }
+
+    const res = await this.makeRequestAsync(requestOptions)
+
+    const body = await readAsBuffer(res)
+
+    const data = xmlParsers.uploadPartParser(body.toString())
+
+    return {
+      etag: sanitizeETag(data.ETag),
+      key: objectName,
+      part: partNumber,
+    }
+  }
+
+  // composeObject(
+  //   destObjConfig: CopyDestinationOptions,
+  //   sourceObjList: CopySourceOptions[],
+  //   callback: ResultCallback,
+  // ): void
+  // composeObject(destObjConfig: CopyDestinationOptions, sourceObjList: CopySourceOptions[]): Promise
+
+  composeObject(
+    destObjConfig: CopyDestinationOptions,
+    sourceObjList: CopySourceOptions[],
+    cb?: ResultCallback<UploadedObjectInfo>,
+  ): unknown {
+    const me = this // many async flows, so store the ref.
+
+    if (!isArray(sourceObjList)) {
+      throw new errors.InvalidArgumentError('sourceObjList should be an array of CopySourceOptions')
+    }
+    if (!(destObjConfig instanceof CopyDestinationOptions)) {
+      throw new errors.InvalidArgumentError('destObjConfig should be of type "CopyDestinationOptions"')
+    }
+
+    const sourceFilesLength = sourceObjList.length
+    if (sourceFilesLength < 1 || sourceFilesLength > PART_CONSTRAINTS.MAX_PARTS_COUNT) {
+      throw new errors.InvalidArgumentError(
+        `There must be at least one and up to ${PART_CONSTRAINTS.MAX_PARTS_COUNT} source objects.`,
+      )
+    }
+
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+
+    for (let i = 0; i < sourceFilesLength; i++) {
+      // @ts-expect-error index check
+      if (!sourceObjList[i].validate()) {
+        return false
+      }
+    }
+
+    if (!destObjConfig.validate()) {
+      return false
+    }
+
+    const getStatOptions = (srcConfig: CopySourceOptions) => {
+      let statOpts = {}
+      if (!isEmpty(srcConfig.VersionID)) {
+        statOpts = {
+          versionId: srcConfig.VersionID,
+        }
+      }
+      return statOpts
+    }
+    const srcObjectSizes: number[] = []
+    let totalSize = 0
+    let totalParts = 0
+
+    const sourceObjStats = sourceObjList.map((srcItem) =>
+      me.statObject(srcItem.Bucket, srcItem.Object, getStatOptions(srcItem)),
+    )
+
+    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+    // @ts-ignore
+    return asCallback(cb, async () => {
+      const srcObjectInfos = await Promise.all(sourceObjStats)
+      const validatedStats = srcObjectInfos.map((resItemStat, index) => {
+        const srcConfig = sourceObjList[index]
+
+        let srcCopySize = resItemStat.size
+        // Check if a segment is specified, and if so, is the
+        // segment within object bounds?
+ // @ts-expect-error index check + if (srcConfig.MatchRange) { + // Since range is specified, + // 0 <= src.srcStart <= src.srcEnd + // so only invalid case to check is: + // @ts-expect-error index check + const srcStart = srcConfig.Start + // @ts-expect-error index check + const srcEnd = srcConfig.End + if (srcEnd >= srcCopySize || srcStart < 0) { + throw new errors.InvalidArgumentError( + `CopySrcOptions ${index} has invalid segment-to-copy [${srcStart}, ${srcEnd}] (size is ${srcCopySize})`, + ) + } + srcCopySize = srcEnd - srcStart + 1 + } + + // Only the last source may be less than `absMinPartSize` + if (srcCopySize < PART_CONSTRAINTS.ABS_MIN_PART_SIZE && index < sourceFilesLength - 1) { + throw new errors.InvalidArgumentError( + `CopySrcOptions ${index} is too small (${srcCopySize}) and it is not the last part.`, + ) + } + + // Is data to copy too large? + totalSize += srcCopySize + if (totalSize > PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE) { + throw new errors.InvalidArgumentError(`Cannot compose an object of size ${totalSize} (> 5TiB)`) + } + + // record source size + srcObjectSizes[index] = srcCopySize + + // calculate parts needed for current source + totalParts += partsRequired(srcCopySize) + // Do we need more parts than we are allowed? + if (totalParts > PART_CONSTRAINTS.MAX_PARTS_COUNT) { + throw new errors.InvalidArgumentError( + `Your proposed compose object requires more than ${PART_CONSTRAINTS.MAX_PARTS_COUNT} parts`, + ) + } + + return resItemStat + }) + + if ((totalParts === 1 && totalSize <= PART_CONSTRAINTS.MAX_PART_SIZE) || totalSize === 0) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + return this.copyObject(sourceObjList[0], destObjConfig) // use copyObjectV2 + } + + // preserve etag to avoid modification of object while copying. + for (let i = 0; i < sourceFilesLength; i++) { + // @ts-expect-error index check + sourceObjList[i].MatchETag = validatedStats[i].etag + } + + const newUploadHeaders = destObjConfig.getHeaders() + + const uploadId = await me.initiateNewMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, newUploadHeaders) + + const uploadList = validatedStats + .map((resItemStat, idx) => { + // @ts-expect-error index check + return calculateEvenSplits(srcObjectSizes[idx], sourceObjList[idx]) + }) + .flatMap((splitSize, splitIndex) => { + if (splitSize === null) { + throw new Error('BUG: splitSize === 0') + } + + const { startIndex: startIdx, endIndex: endIdx, objInfo: objConfig } = splitSize + + const partIndex = splitIndex + 1 // part index starts from 1. 
+          const totalUploads = Array.from(startIdx)
+
+          // @ts-expect-error index check
+          const headers = sourceObjList[splitIndex].getHeaders()
+
+          return totalUploads.map((splitStart, upldCtrIdx) => {
+            const splitEnd = endIdx[upldCtrIdx]
+
+            const sourceObj = `${objConfig.Bucket}/${objConfig.Object}`
+            headers['x-amz-copy-source'] = `${sourceObj}`
+            headers['x-amz-copy-source-range'] = `bytes=${splitStart}-${splitEnd}`
+
+            return {
+              bucketName: destObjConfig.Bucket,
+              objectName: destObjConfig.Object,
+              uploadID: uploadId,
+              partNumber: partIndex,
+              headers: headers,
+              sourceObj: sourceObj,
+            } as PartConfig
+          })
+        })
+
+      try {
+        const rr = await async.map(uploadList, async (o: PartConfig) => me.uploadPartCopy(o))
+        const partsDone = rr.map((partCopy) => ({ etag: partCopy.etag, part: partCopy.part }))
+        return me.completeMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId, partsDone)
+      } catch (e) {
+        await this.abortMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId)
+        throw e
+      }
+    })
+  }
+
+  setObjectLockConfig(
+    bucketName: string,
+    lockConfigOpts: ObjectLockConfig = {},
+    cb?: NoResultCallback,
+  ): void | Promise {
+    const retentionModes = [RETENTION_MODES.COMPLIANCE, RETENTION_MODES.GOVERNANCE]
+    const validUnits = [RETENTION_VALIDITY_UNITS.DAYS, RETENTION_VALIDITY_UNITS.YEARS]
+
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+
+    if (lockConfigOpts.mode && !retentionModes.includes(lockConfigOpts.mode)) {
+      throw new TypeError(`lockConfigOpts.mode should be one of ${retentionModes}`)
+    }
+    if (lockConfigOpts.unit && !validUnits.includes(lockConfigOpts.unit)) {
+      throw new TypeError(`lockConfigOpts.unit should be one of ${validUnits}`)
+    }
+    if (lockConfigOpts.validity && !isNumber(lockConfigOpts.validity)) {
+      throw new TypeError(`lockConfigOpts.validity should be a number`)
+    }
+
+    const method = 'PUT'
+    const query = 'object-lock'
+
+    const config: { ObjectLockEnabled: string; Rule?: { DefaultRetention: Record } } = {
+      ObjectLockEnabled: 'Enabled',
+    }
+    const configKeys = Object.keys(lockConfigOpts)
+    // If retention options are given, check that only the supported keys are present.
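+    // i.e. only `mode`, `unit` and `validity` are accepted, and they are expected together.
+    // Illustrative example (not from this changeset): { mode: 'COMPLIANCE', unit: 'Days', validity: 10 }
+    // is serialized by the headless builder below roughly as:
+    //   <ObjectLockConfiguration><ObjectLockEnabled>Enabled</ObjectLockEnabled>
+    //     <Rule><DefaultRetention><Mode>COMPLIANCE</Mode><Days>10</Days></DefaultRetention></Rule>
+    //   </ObjectLockConfiguration>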
+    if (configKeys.length > 0) {
+      if (_.difference(configKeys, ['unit', 'mode', 'validity']).length !== 0) {
+        throw new TypeError(
+          `lockConfigOpts.mode, lockConfigOpts.unit and lockConfigOpts.validity must all be specified together.`,
+        )
+      } else {
+        config.Rule = {
+          DefaultRetention: {},
+        }
+        if (lockConfigOpts.mode) {
+          config.Rule.DefaultRetention.Mode = lockConfigOpts.mode
+        }
+        if (lockConfigOpts.unit === RETENTION_VALIDITY_UNITS.DAYS) {
+          config.Rule.DefaultRetention.Days = lockConfigOpts.validity
+        } else if (lockConfigOpts.unit === RETENTION_VALIDITY_UNITS.YEARS) {
+          config.Rule.DefaultRetention.Years = lockConfigOpts.validity
+        }
+      }
+    }
+
+    const builder = new xml2js.Builder({
+      rootName: 'ObjectLockConfiguration',
+      renderOpts: { pretty: false },
+      headless: true,
+    })
+    const payload = builder.buildObject(config)
+
+    const headers: RequestHeaders = {}
+    headers['Content-MD5'] = toMd5(payload)
+
+    return asCallbackFn(cb, async () => {
+      await this.makeRequestAsyncOmit(
+        {
+          method,
+          bucketName,
+          query,
+          headers,
+        },
+        payload,
+      )
+    })
+  }
+
+  getObjectLockConfig(
+    bucketName: string,
+    cb?: ResultCallback,
+  ): void | Promise {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isOptionalFunction(cb)) {
+      throw new errors.InvalidArgumentError('callback should be of type "function"')
+    }
+    const method = 'GET'
+    const query = 'object-lock'
+
+    return asCallbackFn(cb, async () => {
+      const res = await this.makeRequestAsync({ method, bucketName, query })
+      const body = await readAsBuffer(res)
+      return xmlParsers.parseObjectLockConfig(body.toString())
+    })
+  }
+
+  removeBucketEncryption(bucketName: string, cb: NoResultCallback): void | Promise {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isFunction(cb)) {
+      throw new errors.InvalidArgumentError('callback should be of type "function"')
+    }
+    const method = 'DELETE'
+    const query = 'encryption'
+    return asCallbackFn(cb, async () => {
+      await this.makeRequestAsyncOmit({ method, bucketName, query }, '', [204])
+    })
+  }
+
+  setBucketReplication(
+    bucketName: string,
+    replicationConfig: {
+      role?: string
+      rules?: unknown
+    } = {},
+    cb?: NoResultCallback,
+  ): void | Promise {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isObject(replicationConfig)) {
+      throw new errors.InvalidArgumentError('replicationConfig should be of type "object"')
+    } else {
+      if (isEmpty(replicationConfig.role)) {
+        throw new errors.InvalidArgumentError('Role cannot be empty')
+      } else if (replicationConfig.role && !isString(replicationConfig.role)) {
+        throw new errors.InvalidArgumentError(`Invalid value for role: ${replicationConfig.role}`)
+      }
+      if (isEmpty(replicationConfig.rules)) {
+        throw new errors.InvalidArgumentError('At least one replication rule must be specified')
+      }
+    }
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+
+    const method = 'PUT'
+    const query = 'replication'
+    const headers: RequestHeaders = {}
+
+    const replicationParamsConfig = {
+      ReplicationConfiguration: {
+        Role: replicationConfig.role,
+        Rule: replicationConfig.rules,
+      },
+    }
+
+    const builder = new xml2js.Builder({ renderOpts: { pretty: false }, headless: true })
+
+    const payload = builder.buildObject(replicationParamsConfig)
+
+    headers['Content-MD5'] = toMd5(payload)
+    return asCallbackFn(cb, async () => {
+      await this.makeRequestAsyncOmit(
+        {
+          method,
+          bucketName,
+          query,
+          headers,
+        },
+        payload,
+      )
+    })
+  }
+
+  getBucketReplication(bucketName: string, cb?: ResultCallback): void | Promise {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isOptionalFunction(cb)) {
+      throw new errors.InvalidArgumentError('callback should be of type "function"')
+    }
+    const method = 'GET'
+    const query = 'replication'
+
+    return asCallbackFn(cb, async () => {
+      const res = await this.makeRequestAsync({ method, bucketName, query })
+      const body = await readAsBuffer(res)
+      return xmlParsers.parseReplicationConfig(body.toString())
+    })
+  }
+
+  removeBucketReplication(bucketName: string, cb?: NoResultCallback): void | Promise {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    const method = 'DELETE'
+    const query = 'replication'
+    return asCallbackFn(cb, async () => {
+      await this.makeRequestAsyncOmit(
+        {
+          method,
+          bucketName,
+          query,
+        },
+        '',
+        [200, 204],
+      )
+    })
+  }
+
+  // Remove all the notification configurations in the S3 provider
+  removeAllBucketNotification(bucketName: string, cb?: NoResultCallback) {
+    return this.setBucketNotification(bucketName, new NotificationConfig(), cb)
+  }
+
+  // Get the notification configuration in the S3 provider
+  getBucketNotification(bucketName: string, cb?: ResultCallback): void | Promise {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+    const method = 'GET'
+    const query = 'notification'
+    return asCallbackFn(cb, async () => {
+      const res = await this.makeRequestAsync({ method, bucketName, query })
+      const body = await readAsBuffer(res)
+      return xmlParsers.parseBucketNotification(body.toString())
+    })
+  }
+
+  // Listens for bucket notifications. Returns an EventEmitter.
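+  //
+  // Usage sketch (illustrative; the bucket, prefix and event names are made-up values):
+  //
+  //   const poller = client.listenBucketNotification('my-bucket', 'photos/', '.jpg', ['s3:ObjectCreated:*'])
+  //   poller.on('notification', (record) => console.log(record))
+  //   poller.stop() // assumed NotificationPoller API for ending the poll loop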
+  listenBucketNotification(bucketName: string, prefix: string, suffix: string, events: NotificationEvent[]) {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
+    }
+    if (!isString(prefix)) {
+      throw new TypeError('prefix must be of type string')
+    }
+    if (!isString(suffix)) {
+      throw new TypeError('suffix must be of type string')
+    }
+    if (!isArray(events)) {
+      throw new TypeError('events must be of type Array')
+    }
+    const listener = new NotificationPoller(this, bucketName, prefix, suffix, events)
+    listener.start()
+
+    return listener
+  }
+
+  // Set the notification configuration in the S3 provider
+  setBucketNotification(bucketName: string, config: NotificationConfig, cb?: NoResultCallback): void | Promise {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isObject(config)) {
+      throw new TypeError('notification config should be of type "Object"')
+    }
+    if (!isOptionalFunction(cb)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+    const method = 'PUT'
+    const query = 'notification'
+    const builder = new xml2js.Builder({
+      rootName: 'NotificationConfiguration',
+      renderOpts: { pretty: false },
+      headless: true,
+    })
+    const payload = builder.buildObject(config)
+    return asCallbackFn(cb, async () => {
+      await this.makeRequestAsyncOmit({ method, bucketName, query }, payload)
+    })
+  }
+
+  // List the objects in the bucket using S3 ListObjects V2
+  //
+  // __Arguments__
+  // * `bucketName` _string_: name of the bucket
+  // * `prefix` _string_: the prefix of the objects that should be listed (optional, default `''`)
+  // * `recursive` _bool_: `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. (optional, default `false`)
+  // * `startAfter` _string_: Specifies the key to start after when listing objects in a bucket. (optional, default `''`)
+  //
+  // __Return Value__
+  // * `stream` _Stream_: stream emitting the objects in the bucket, the object is of the format:
+  //   * `obj.name` _string_: name of the object
+  //   * `obj.prefix` _string_: name of the object prefix
+  //   * `obj.size` _number_: size of the object
+  //   * `obj.etag` _string_: etag of the object
+  //   * `obj.lastModified` _Date_: modified time stamp
+  listObjectsV2(bucketName: string, prefix: string, recursive?: boolean, startAfter?: string) {
+    if (prefix === undefined) {
+      prefix = ''
+    }
+    if (recursive === undefined) {
+      recursive = false
+    }
+    if (startAfter === undefined) {
+      startAfter = ''
+    }
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidPrefix(prefix)) {
+      throw new errors.InvalidPrefixError(`Invalid prefix: ${prefix}`)
+    }
+    if (!isString(prefix)) {
+      throw new TypeError('prefix should be of type "string"')
+    }
+    if (!isBoolean(recursive)) {
+      throw new TypeError('recursive should be of type "boolean"')
+    }
+    if (!isString(startAfter)) {
+      throw new TypeError('startAfter should be of type "string"')
+    }
+    // if recursive is false set delimiter to '/'
+    const delimiter = recursive ? '' : '/'
+    let continuationToken = ''
+    let objects: S3ListObject[] = []
+    let ended = false
+    const readStream = new stream.Readable({ objectMode: true })
+    readStream._read = () => {
+      // push one object per _read()
+      if (objects.length) {
+        readStream.push(objects.shift())
+        return
+      }
+      if (ended) {
+        return readStream.push(null)
+      }
+      // if there are no objects to push do query for the next batch of objects
+      this.listObjectsV2Query(bucketName, prefix, continuationToken, delimiter, 1000, startAfter!)
+        .on('error', (e) => readStream.emit('error', e))
+        .on('data', (result) => {
+          if (result.isTruncated) {
+            continuationToken = result.nextContinuationToken
+          } else {
+            ended = true
+          }
+          objects = result.objects
+          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+          // @ts-ignore
+          readStream._read()
+        })
+    }
+    return readStream
+  }
+
+  // Makes a single ListObjects V2 API call and returns a stream of the parsed results.
+  //
+  // __Arguments__
+  // * `bucketName` _string_: name of the bucket
+  // * `prefix` _string_: the prefix of the objects that should be listed
+  // * `continuation-token` _string_: token returned by a previous truncated listing
+  // * `delimiter` _string_: '/' for directory style listing, '' for recursive listing
+  // * `max-keys` _number_: maximum number of keys to return (capped at 1000)
+  // * `start-after` _string_: Specifies the key to start after when listing objects in a bucket.
+  listObjectsV2Query(
+    bucketName: string,
+    prefix: string,
+    continuationToken: string,
+    delimiter: string,
+    maxKeys: number,
+    startAfter: string,
+  ) {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isString(prefix)) {
+      throw new TypeError('prefix should be of type "string"')
+    }
+    if (!isString(continuationToken)) {
+      throw new TypeError('continuationToken should be of type "string"')
+    }
+    if (!isString(delimiter)) {
+      throw new TypeError('delimiter should be of type "string"')
+    }
+    if (!isNumber(maxKeys)) {
+      throw new TypeError('maxKeys should be of type "number"')
+    }
+    if (!isString(startAfter)) {
+      throw new TypeError('startAfter should be of type "string"')
+    }
+    const queries = []
+
+    // Call for listing objects v2 API
+    queries.push(`list-type=2`)
+    queries.push(`encoding-type=url`)
+
+    // escape every value in query string, except maxKeys
+    queries.push(`prefix=${uriEscape(prefix)}`)
+    queries.push(`delimiter=${uriEscape(delimiter)}`)
+
+    if (continuationToken) {
+      continuationToken = uriEscape(continuationToken)
+      queries.push(`continuation-token=${continuationToken}`)
+    }
+    // Set start-after
+    if (startAfter) {
+      startAfter = uriEscape(startAfter)
+      queries.push(`start-after=${startAfter}`)
+    }
+    // no need to escape maxKeys
+    if (maxKeys) {
+      if (maxKeys >= 1000) {
+        maxKeys = 1000
+      }
+      queries.push(`max-keys=${maxKeys}`)
+    }
+    queries.sort()
+    let query = ''
+    if (queries.length > 0) {
+      query = `${queries.join('&')}`
+    }
+    const method = 'GET'
+    const transformer = transformers.getListObjectsV2Transformer()
+    this.makeRequestAsync({ method, bucketName, query }, '', [200], '', true).then(
+      (response) => {
+        pipesetup(response, transformer)
+      },
+      (e) => {
+        return transformer.emit('error', e)
+      },
+    )
+    return transformer
+  }
+
+  // Uploads an object.
+  //
+  // __Arguments__
+  // * `bucketName` _string_: name of the bucket
+  // * `objectName` _string_: name of the object
+  // * `stream` _Stream_ | _Buffer_ | _string_: content of the object
+  // * `size` _number_: size of the object (optional)
+  // * `metaData` _Javascript Object_: metaData of the object (optional)
+  putObject(
+    bucketName: string,
+    objectName: string,
+    stream: string | Buffer | stream.Readable,
+    sizeArg?: number,
+    metaDataArg?: MetaData,
+    callbackArg?: ResultCallback,
+  ): void | Promise {
+    if (!isValidBucketName(bucketName)) {
+      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
+    }
+    if (!isValidObjectName(objectName)) {
+      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
+    }
+
+    let [[size, metaData = {}], callback] = findCallback<
+      [number | undefined, MetaData],
+      ResultCallback
+    >([sizeArg, metaDataArg, callbackArg])
+
+    // We'll need to shift arguments to the left because of metaData
+    // and size being optional.
+    if (isObject(size)) {
+      metaData = size
+      size = undefined
+    }
+
+    // Ensures metadata has the appropriate prefix for the S3 API
+    metaData = prependXAMZMeta(metaData)
+    if (typeof stream === 'string' || stream instanceof Buffer) {
+      // Adapts the non-stream interface into a stream.
+      const contentLength = Buffer.from(stream).length
+      if (size !== undefined && size !== contentLength) {
+        throw new errors.InvalidArgumentError(
+          `size input and object length mismatch, object has length ${contentLength} but input size is ${size}`,
+        )
+      }
+      size = contentLength
+    } else if (!isReadableStream(stream)) {
+      throw new TypeError('third argument should be of type "stream.Readable" or "Buffer" or "string"')
+    }
+
+    if (!isOptionalFunction(callback)) {
+      throw new TypeError('callback should be of type "function"')
+    }
+
+    if (isNumber(size) && size < 0) {
+      throw new errors.InvalidArgumentError(`size cannot be negative, given size: ${size}`)
+    }
+
+    if (isNumber(size) && size > this.maxObjectSize) {
+      throw new TypeError(`size should not be more than ${this.maxObjectSize}`)
+    }
+
+    const executor = async () => {
+      // Get the part size and forward that to the BlockStream. Default to the
+      // largest block size possible if necessary.
+      if (size === undefined) {
+        const statSize = await getContentLength(stream)
+        if (statSize !== null) {
+          size = statSize
+        }
+      }
+
+      if (!isNumber(size)) {
+        // Backward compatibility
+        size = this.maxObjectSize
+      }
+
+      const partSize = this.calculatePartSize(size)
+
+      if (typeof stream === 'string' || Buffer.isBuffer(stream) || size <= this.partSize) {
+        const uploader = this.getUploader(bucketName, objectName, metaData, false)
+        const buf = isReadableStream(stream) ? await readAsBuffer(stream) : Buffer.from(stream)
+        const { md5sum, sha256sum } = transformers.hashBinary(buf, this.enableSHA256)
+        return uploader(buf, buf.length, sha256sum, md5sum)
+      }
+
+      return uploadStream({
+        client: this,
+        stream: isReadableStream(stream) ?
stream : readableStream(stream), + partSize, + bucketName, + objectName, + metaData, + }) + } + + return asCallback(callback, executor()) + } +} + +async function getContentLength(s: stream.Readable | Buffer | string): Promise { + const length = (s as unknown as Record).length as number | undefined + if (isNumber(length)) { + return length + } + + // property of fs.ReadStream + const filePath = (s as unknown as Record).path as string | undefined + if (filePath) { + const stat = await fsp.lstat(filePath) + return stat.size + } + + // property of fs.ReadStream + const fd = (s as unknown as Record).fd as number | null | undefined + + if (fd) { + const stat = await fsp.fstat(fd) + return stat.size + } + + return null +} diff --git a/src/upload.ts b/src/upload.ts new file mode 100644 index 00000000..e69de29b diff --git a/src/xml-parsers.js b/src/xml-parsers.ts similarity index 59% rename from src/xml-parsers.js rename to src/xml-parsers.ts index 447ec898..c1ae330e 100644 --- a/src/xml-parsers.js +++ b/src/xml-parsers.ts @@ -14,52 +14,52 @@ * limitations under the License. */ -import crc32 from 'buffer-crc32' +import * as newCrc32 from 'crc-32' import { XMLParser } from 'fast-xml-parser' -import _ from 'lodash' import * as errors from './errors.ts' +import type { MetaData, RETENTION_MODES } from './helpers.ts' import { isObject, parseXml, - readableStream, RETENTION_VALIDITY_UNITS, sanitizeETag, sanitizeObjectKey, SelectResults, toArray, -} from './helpers.js' +} from './helpers.ts' +import type { BucketItemCopy, BucketItemFromList, Retention, UploadID } from './type.ts' -// Parse XML and return information as Javascript types const fxp = new XMLParser() +// Parse XML and return information as Javascript types // parse error XML response -export function parseError(xml, headerInfo) { - var xmlErr = {} - var xmlObj = fxp.parse(xml) +export function parseError(xml: string, headerInfo: Record) { + let xmlErr = {} + const xmlObj = fxp.parse(xml) if (xmlObj.Error) { xmlErr = xmlObj.Error } - var e = new errors.S3Error() - _.each(xmlErr, (value, key) => { + const e = new errors.S3Error() as unknown as Record + Object.entries(xmlErr).forEach(([key, value]) => { e[key.toLowerCase()] = value }) - _.each(headerInfo, (value, key) => { + Object.entries(headerInfo).forEach(([key, value]) => { e[key] = value }) + return e } // parse XML response for copy object -export function parseCopyObject(xml) { - var result = { +export function parseCopyObject(xml: string): BucketItemCopy { + const result: { etag: string; lastModified?: Date } = { etag: '', - lastModified: '', } - var xmlobj = parseXml(xml) + let xmlobj = parseXml(xml) if (!xmlobj.CopyObjectResult) { throw new errors.InvalidXMLError('Missing tag: "CopyObjectResult"') } @@ -80,14 +80,23 @@ export function parseCopyObject(xml) { } // parse XML response for listing in-progress multipart uploads -export function parseListMultipart(xml) { - var result = { - uploads: [], - prefixes: [], +export function parseListMultipart(xml: string) { + const result = { + uploads: [] as { + key: string + uploadId: UploadID + initiator: unknown + owner: unknown + storageClass: unknown + initiated: unknown + }[], + prefixes: [] as { prefix: string }[], isTruncated: false, + nextKeyMarker: undefined, + nextUploadIdMarker: undefined, } - var xmlobj = parseXml(xml) + let xmlobj = parseXml(xml) if (!xmlobj.ListMultipartUploadsResult) { throw new errors.InvalidXMLError('Missing tag: "ListMultipartUploadsResult"') @@ -105,18 +114,19 @@ export function parseListMultipart(xml) { if 
(xmlobj.CommonPrefixes) { toArray(xmlobj.CommonPrefixes).forEach((prefix) => { - result.prefixes.push({ prefix: sanitizeObjectKey(toArray(prefix.Prefix)[0]) }) + // @ts-expect-error index check + result.prefixes.push({ prefix: sanitizeObjectKey(toArray(prefix.Prefix)[0]) }) }) } if (xmlobj.Upload) { toArray(xmlobj.Upload).forEach((upload) => { - var key = upload.Key - var uploadId = upload.UploadId - var initiator = { id: upload.Initiator.ID, displayName: upload.Initiator.DisplayName } - var owner = { id: upload.Owner.ID, displayName: upload.Owner.DisplayName } - var storageClass = upload.StorageClass - var initiated = new Date(upload.Initiated) + const key = upload.Key + const uploadId = upload.UploadId + const initiator = { id: upload.Initiator.ID, displayName: upload.Initiator.DisplayName } + const owner = { id: upload.Owner.ID, displayName: upload.Owner.DisplayName } + const storageClass = upload.StorageClass + const initiated = new Date(upload.Initiated) result.uploads.push({ key, uploadId, initiator, owner, storageClass, initiated }) }) } @@ -124,9 +134,9 @@ export function parseListMultipart(xml) { } // parse XML response to list all the owned buckets -export function parseListBucket(xml) { - var result = [] - var xmlobj = parseXml(xml) +export function parseListBucket(xml: string): BucketItemFromList[] { + const result: BucketItemFromList[] = [] + let xmlobj = parseXml(xml) if (!xmlobj.ListAllMyBucketsResult) { throw new errors.InvalidXMLError('Missing tag: "ListAllMyBucketsResult"') @@ -136,8 +146,8 @@ export function parseListBucket(xml) { if (xmlobj.Buckets) { if (xmlobj.Buckets.Bucket) { toArray(xmlobj.Buckets.Bucket).forEach((bucket) => { - var name = bucket.Name - var creationDate = new Date(bucket.CreationDate) + const name = bucket.Name + const creationDate = new Date(bucket.CreationDate) result.push({ name, creationDate }) }) } @@ -146,33 +156,31 @@ export function parseListBucket(xml) { } // parse XML response for bucket notification -export function parseBucketNotification(xml) { - var result = { - TopicConfiguration: [], - QueueConfiguration: [], - CloudFunctionConfiguration: [], +export function parseBucketNotification(xml: string): any { + const result = { + TopicConfiguration: [] as unknown[], + QueueConfiguration: [] as unknown[], + CloudFunctionConfiguration: [] as unknown[], } // Parse the events list - var genEvents = function (events) { - var result = [] + const genEvents = function (events: any) { + const result = [] if (events) { - toArray(events).forEach((s3event) => { - result.push(s3event) - }) + result.push(...toArray(events)) } return result } // Parse all filter rules - var genFilterRules = function (filters) { - var result = [] + const genFilterRules = function (filters: any) { + const result: { Name: string; Value: string }[] = [] if (filters) { filters = toArray(filters) if (filters[0].S3Key) { filters[0].S3Key = toArray(filters[0].S3Key) if (filters[0].S3Key[0].FilterRule) { toArray(filters[0].S3Key[0].FilterRule).forEach((rule) => { - var Name = toArray(rule.Name)[0] - var Value = toArray(rule.Value)[0] + const Name = toArray(rule.Name)[0] + const Value = toArray(rule.Value)[0] result.push({ Name, Value }) }) } @@ -181,36 +189,36 @@ export function parseBucketNotification(xml) { return result } - var xmlobj = parseXml(xml) + let xmlobj = parseXml(xml) xmlobj = xmlobj.NotificationConfiguration // Parse all topic configurations in the xml if (xmlobj.TopicConfiguration) { toArray(xmlobj.TopicConfiguration).forEach((config) => { - var Id = 
toArray(config.Id)[0] - var Topic = toArray(config.Topic)[0] - var Event = genEvents(config.Event) - var Filter = genFilterRules(config.Filter) + const Id = toArray(config.Id)[0] + const Topic = toArray(config.Topic)[0] + const Event = genEvents(config.Event) + const Filter = genFilterRules(config.Filter) result.TopicConfiguration.push({ Id, Topic, Event, Filter }) }) } // Parse all topic configurations in the xml if (xmlobj.QueueConfiguration) { toArray(xmlobj.QueueConfiguration).forEach((config) => { - var Id = toArray(config.Id)[0] - var Queue = toArray(config.Queue)[0] - var Event = genEvents(config.Event) - var Filter = genFilterRules(config.Filter) + const Id = toArray(config.Id)[0] + const Queue = toArray(config.Queue)[0] + const Event = genEvents(config.Event) + const Filter = genFilterRules(config.Filter) result.QueueConfiguration.push({ Id, Queue, Event, Filter }) }) } // Parse all QueueConfiguration arrays if (xmlobj.CloudFunctionConfiguration) { toArray(xmlobj.CloudFunctionConfiguration).forEach((config) => { - var Id = toArray(config.Id)[0] - var CloudFunction = toArray(config.CloudFunction)[0] - var Event = genEvents(config.Event) - var Filter = genFilterRules(config.Filter) + const Id = toArray(config.Id)[0] + const CloudFunction = toArray(config.CloudFunction)[0] + const Event = genEvents(config.Event) + const Filter = genFilterRules(config.Filter) result.CloudFunctionConfiguration.push({ Id, CloudFunction, Event, Filter }) }) } @@ -219,18 +227,24 @@ export function parseBucketNotification(xml) { } // parse XML response for bucket region -export function parseBucketRegion(xml) { +export function parseBucketRegion(xml: string) { // return region information return parseXml(xml).LocationConstraint } +export type Part = { + part: number + lastModified?: Date + etag: string +} + // parse XML response for list parts of an in progress multipart upload -export function parseListParts(xml) { - var xmlobj = parseXml(xml) - var result = { +export function parseListParts(xml: string): { isTruncated: boolean; marker: number | undefined; parts: Part[] } { + let xmlobj = parseXml(xml) + const result: { isTruncated: boolean; marker: number | undefined; parts: Part[] } = { isTruncated: false, parts: [], - marker: undefined, + marker: undefined as number | undefined, } if (!xmlobj.ListPartsResult) { throw new errors.InvalidXMLError('Missing tag: "ListPartsResult"') @@ -240,13 +254,13 @@ export function parseListParts(xml) { result.isTruncated = xmlobj.IsTruncated } if (xmlobj.NextPartNumberMarker) { - result.marker = +toArray(xmlobj.NextPartNumberMarker)[0] + result.marker = toArray(xmlobj.NextPartNumberMarker)[0] } if (xmlobj.Part) { toArray(xmlobj.Part).forEach((p) => { - var part = +toArray(p.PartNumber)[0] - var lastModified = new Date(p.LastModified) - var etag = p.ETag.replace(/^"/g, '') + const part = +toArray(p.PartNumber)[0] + const lastModified = new Date(p.LastModified) + const etag = p.ETag.replace(/^"/g, '') .replace(/"$/g, '') .replace(/^"/g, '') .replace(/"$/g, '') @@ -259,8 +273,8 @@ export function parseListParts(xml) { } // parse XML response when a new multipart upload is initiated -export function parseInitiateMultipart(xml) { - var xmlobj = parseXml(xml) +export function parseInitiateMultipart(xml: string) { + let xmlobj = parseXml(xml) if (!xmlobj.InitiateMultipartUploadResult) { throw new errors.InvalidXMLError('Missing tag: "InitiateMultipartUploadResult"') @@ -273,14 +287,24 @@ export function parseInitiateMultipart(xml) { throw new errors.InvalidXMLError('Missing 
tag: "UploadId"') } +export type MultipartResult = + | { errCode: string; errMessage: string } + | { + errCode?: undefined // this help TS to narrow type + etag: string + key: string + bucket: string + location: string + } + // parse XML response when a multipart upload is completed -export function parseCompleteMultipart(xml) { - var xmlobj = parseXml(xml).CompleteMultipartUploadResult +export function parseCompleteMultipart(xml: string) { + const xmlobj = parseXml(xml).CompleteMultipartUploadResult if (xmlobj.Location) { - var location = toArray(xmlobj.Location)[0] - var bucket = toArray(xmlobj.Bucket)[0] - var key = xmlobj.Key - var etag = xmlobj.ETag.replace(/^"/g, '') + const location = toArray(xmlobj.Location)[0] + const bucket = toArray(xmlobj.Bucket)[0] + const key = xmlobj.Key + const etag = xmlobj.ETag.replace(/^"/g, '') .replace(/"$/g, '') .replace(/^"/g, '') .replace(/"$/g, '') @@ -291,20 +315,31 @@ export function parseCompleteMultipart(xml) { } // Complete Multipart can return XML Error after a 200 OK response if (xmlobj.Code && xmlobj.Message) { - var errCode = toArray(xmlobj.Code)[0] - var errMessage = toArray(xmlobj.Message)[0] + const errCode = toArray(xmlobj.Code)[0] + const errMessage = toArray(xmlobj.Message)[0] return { errCode, errMessage } } } -const formatObjInfo = (content, opts = {}) => { - let { Key, LastModified, ETag, Size, VersionId, IsLatest } = content +type ListedObject = { + Key: string + LastModified: string + ETag: string + Size: number + VersionId?: string + IsLatest?: boolean +} + +const formatObjInfo = (content: ListedObject, opts: { IsDeleteMarker?: boolean } = {}) => { + const { Key, LastModified, ETag, Size, VersionId, IsLatest } = content if (!isObject(opts)) { opts = {} } + // @ts-expect-error index check const name = sanitizeObjectKey(toArray(Key)[0]) + // @ts-expect-error index check const lastModified = new Date(toArray(LastModified)[0]) const etag = sanitizeETag(toArray(ETag)[0]) @@ -319,17 +354,52 @@ const formatObjInfo = (content, opts = {}) => { } } +export type S3ListObject = + | { prefix: string; size: number } + | { name: string; size: number } // sometime api return this, not sure if it's valid + | { + name: string + lastModified: Date + etag: string + size: number + isDeleteMarker?: boolean + isLatest?: boolean + } + +type ListObjectResponse = { + nextMarker?: string + versionIdMarker?: string + objects: S3ListObject[] + isTruncated: boolean + nextContinuationToken?: string +} + // parse XML response for list objects in a bucket -export function parseListObjects(xml) { - var result = { +export function parseListObjects(xml: string) { + const result: ListObjectResponse = { objects: [], isTruncated: false, } let isTruncated = false let nextMarker, nextVersionKeyMarker - const xmlobj = parseXml(xml) + const xmlobj = parseXml(xml) as { + ListBucketResult?: { + CommonPrefixes: { Prefix: string } + IsTruncated: boolean + NextMarker?: string + Contents: Array<{ Key: string; LastModified: string; ETag: string; Size: number }> + } + ListVersionsResult?: { + CommonPrefixes: unknown + NextKeyMarker?: string + NextVersionIdMarker?: string + Version: Array + DeleteMarker?: Array + IsTruncated: boolean + } + } - const parseCommonPrefixesEntity = (responseEntity) => { + const parseCommonPrefixesEntity = (responseEntity: any) => { if (responseEntity) { toArray(responseEntity).forEach((commonPrefix) => { result.objects.push({ prefix: sanitizeObjectKey(toArray(commonPrefix.Prefix)[0]), size: 0 }) @@ -337,7 +407,9 @@ export function 
parseListObjects(xml) { } } + // https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjects.html const listBucketResult = xmlobj.ListBucketResult + // https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjectVersions.html const listVersionsResult = xmlobj.ListVersionsResult if (listBucketResult) { @@ -346,9 +418,9 @@ export function parseListObjects(xml) { } if (listBucketResult.Contents) { toArray(listBucketResult.Contents).forEach((content) => { - const name = sanitizeObjectKey(toArray(content.Key)[0]) - const lastModified = new Date(toArray(content.LastModified)[0]) - const etag = sanitizeETag(toArray(content.ETag)[0]) + const name = sanitizeObjectKey(content.Key) + const lastModified = new Date(content.LastModified) + const etag = sanitizeETag(content.ETag) const size = content.Size result.objects.push({ name, lastModified, etag, size }) }) @@ -393,12 +465,25 @@ export function parseListObjects(xml) { } // parse XML response for list objects v2 in a bucket -export function parseListObjectsV2(xml) { - var result = { +export function parseListObjectsV2(xml: string) { + const result: { + objects: ( + | { prefix: string; size: number } + | { + name: string + lastModified: Date + etag: string + size: number + } + )[] + isTruncated: boolean + nextContinuationToken?: string + } = { objects: [], isTruncated: false, } - var xmlobj = parseXml(xml) + + let xmlobj = parseXml(xml) if (!xmlobj.ListBucketResult) { throw new errors.InvalidXMLError('Missing tag: "ListBucketResult"') } @@ -411,10 +496,10 @@ export function parseListObjectsV2(xml) { } if (xmlobj.Contents) { toArray(xmlobj.Contents).forEach((content) => { - var name = sanitizeObjectKey(toArray(content.Key)[0]) - var lastModified = new Date(content.LastModified) - var etag = sanitizeETag(content.ETag) - var size = content.Size + const name = sanitizeObjectKey(toArray(content.Key)[0]) + const lastModified = new Date(content.LastModified) + const etag = sanitizeETag(content.ETag) + const size = content.Size result.objects.push({ name, lastModified, etag, size }) }) } @@ -426,13 +511,26 @@ export function parseListObjectsV2(xml) { return result } -// parse XML response for list objects v2 with metadata in a bucket -export function parseListObjectsV2WithMetadata(xml) { - var result = { +export function parseListObjectsV2WithMetadata(xml: string) { + const result: { + objects: ( + | { prefix: string; size: number } + | { + name: string + lastModified: Date + etag: string + size: number + metadata: MetaData | null + } + )[] + isTruncated: boolean + nextContinuationToken?: string + } = { objects: [], isTruncated: false, } - var xmlobj = parseXml(xml) + + let xmlobj = parseXml(xml) if (!xmlobj.ListBucketResult) { throw new errors.InvalidXMLError('Missing tag: "ListBucketResult"') } @@ -446,11 +544,11 @@ export function parseListObjectsV2WithMetadata(xml) { if (xmlobj.Contents) { toArray(xmlobj.Contents).forEach((content) => { - var name = sanitizeObjectKey(content.Key) - var lastModified = new Date(content.LastModified) - var etag = sanitizeETag(content.ETag) - var size = content.Size - var metadata + const name = sanitizeObjectKey(content.Key) + const lastModified = new Date(content.LastModified) + const etag = sanitizeETag(content.ETag) + const size = content.Size + let metadata if (content.UserMetadata != null) { metadata = toArray(content.UserMetadata)[0] } else { @@ -468,12 +566,12 @@ export function parseListObjectsV2WithMetadata(xml) { return result } -export function parseBucketVersioningConfig(xml) { - var xmlObj = parseXml(xml) 
+export function parseBucketVersioningConfig(xml: string) { + const xmlObj = parseXml(xml) return xmlObj.VersioningConfiguration } -export function parseTagging(xml) { +export function parseTagging(xml: string) { const xmlObj = parseXml(xml) let result = [] if (xmlObj.Tagging && xmlObj.Tagging.TagSet && xmlObj.Tagging.TagSet.Tag) { @@ -488,14 +586,21 @@ export function parseTagging(xml) { return result } -export function parseLifecycleConfig(xml) { +export function parseLifecycleConfig(xml: string) { const xmlObj = parseXml(xml) return xmlObj.LifecycleConfiguration } -export function parseObjectLockConfig(xml) { +export type ObjectLockConfig = { + mode?: keyof typeof RETENTION_MODES + objectLockEnabled?: 'Enabled' + unit?: 'Years' | 'Days' + validity?: number +} + +export function parseObjectLockConfig(xml: string): ObjectLockConfig | undefined { const xmlObj = parseXml(xml) - let lockConfigResult = {} + let lockConfigResult: ObjectLockConfig = {} if (xmlObj.ObjectLockConfiguration) { lockConfigResult = { objectLockEnabled: xmlObj.ObjectLockConfiguration.ObjectLockEnabled, @@ -523,43 +628,45 @@ export function parseObjectLockConfig(xml) { } } -export function parseObjectRetentionConfig(xml) { +export function parseObjectRetentionConfig(xml: string) { const xmlObj = parseXml(xml) const retentionConfig = xmlObj.Retention return { mode: retentionConfig.Mode, retainUntilDate: retentionConfig.RetainUntilDate, - } + } as Retention } -export function parseBucketEncryptionConfig(xml) { - let encConfig = parseXml(xml) - return encConfig +export function parseBucketEncryptionConfig(xml: string) { + return parseXml(xml) } -export function parseReplicationConfig(xml) { + +export function parseReplicationConfig(xml: string) { const xmlObj = parseXml(xml) + const replicationConfig = { ReplicationConfiguration: { role: xmlObj.ReplicationConfiguration.Role, rules: toArray(xmlObj.ReplicationConfiguration.Rule), }, } + return replicationConfig } -export function parseObjectLegalHoldConfig(xml) { +export function parseObjectLegalHoldConfig(xml: string) { const xmlObj = parseXml(xml) return xmlObj.LegalHold } -export function uploadPartParser(xml) { +export function uploadPartParser(xml: string) { const xmlObj = parseXml(xml) const respEl = xmlObj.CopyPartResult return respEl } -export function removeObjectsParser(xml) { +export function removeObjectsParser(xml: string) { const xmlObj = parseXml(xml) if (xmlObj.DeleteResult && xmlObj.DeleteResult.Error) { // return errors as array always. as the response is object in case of single object passed in removeObjects @@ -568,39 +675,56 @@ export function removeObjectsParser(xml) { return [] } -export function parseSelectObjectContentResponse(res) { +class ReadableBuffer { + private buf: Buffer + + public readLoc: number + + constructor(buf: Buffer) { + this.buf = buf + this.readLoc = 0 + } + + read(size: number): Buffer { + const sub = this.buf.subarray(this.readLoc, this.readLoc + size) + this.readLoc += size + return sub + } + + notEnd(): boolean { + return this.readLoc < this.buf.length + } +} + +export function parseSelectObjectContentResponse(res: Buffer): SelectResults { // extractHeaderType extracts the first half of the header message, the header type. - function extractHeaderType(stream) { - const headerNameLen = Buffer.from(stream.read(1)).readUInt8() - const headerNameWithSeparator = Buffer.from(stream.read(headerNameLen)).toString() - const splitBySeparator = (headerNameWithSeparator || '').split(':') - const headerName = splitBySeparator.length >= 1 ? 
splitBySeparator[1] : '' - return headerName + function extractHeaderType(stream: ReadableBuffer): string { + const headerNameLen = stream.read(1).readUInt8() + const headerNameWithSeparator = stream.read(headerNameLen).toString() + + const [_, name] = headerNameWithSeparator.split(':') + return name || '' } - function extractHeaderValue(stream) { - const bodyLen = Buffer.from(stream.read(2)).readUInt16BE() - const bodyName = Buffer.from(stream.read(bodyLen)).toString() - return bodyName + function extractHeaderValue(stream: ReadableBuffer) { + const bodyLen = stream.read(2).readUInt16BE() + return stream.read(bodyLen).toString() } const selectResults = new SelectResults({}) // will be returned - const responseStream = readableStream(res) // convert byte array to a readable responseStream - while (responseStream._readableState.length) { - // Top level responseStream read tracker. - let msgCrcAccumulator // accumulate from start of the message till the message crc start. - - const totalByteLengthBuffer = Buffer.from(responseStream.read(4)) - msgCrcAccumulator = crc32(totalByteLengthBuffer) + const responseStream = new ReadableBuffer(res) // convert byte array to a readable responseStream + while (responseStream.notEnd()) { + const totalByteLengthBuffer = responseStream.read(4) + let msgCrcAccumulator = newCrc32.buf(totalByteLengthBuffer) - const headerBytesBuffer = Buffer.from(responseStream.read(4)) - msgCrcAccumulator = crc32(headerBytesBuffer, msgCrcAccumulator) + const headerBytesBuffer = responseStream.read(4) + msgCrcAccumulator = newCrc32.buf(headerBytesBuffer, msgCrcAccumulator) - const calculatedPreludeCrc = msgCrcAccumulator.readInt32BE() // use it to check if any CRC mismatch in header itself. + const calculatedPreludeCrc = msgCrcAccumulator // use it to check if any CRC mismatch in header itself. - const preludeCrcBuffer = Buffer.from(responseStream.read(4)) // read 4 bytes i.e 4+4 =8 + 4 = 12 ( prelude + prelude crc) - msgCrcAccumulator = crc32(preludeCrcBuffer, msgCrcAccumulator) + const preludeCrcBuffer = responseStream.read(4) // read 4 bytes i.e 4+4 =8 + 4 = 12 ( prelude + prelude crc) + msgCrcAccumulator = newCrc32.buf(preludeCrcBuffer, msgCrcAccumulator) const totalMsgLength = totalByteLengthBuffer.readInt32BE() const headerLength = headerBytesBuffer.readInt32BE() @@ -613,40 +737,41 @@ export function parseSelectObjectContentResponse(res) { ) } - const headers = {} + const headers: Record = {} + if (headerLength > 0) { - const headerBytes = Buffer.from(responseStream.read(headerLength)) - msgCrcAccumulator = crc32(headerBytes, msgCrcAccumulator) - const headerReaderStream = readableStream(headerBytes) - while (headerReaderStream._readableState.length) { - let headerTypeName = extractHeaderType(headerReaderStream) + const headerBytes = responseStream.read(headerLength) + msgCrcAccumulator = newCrc32.buf(headerBytes, msgCrcAccumulator) + const headerReaderStream = new ReadableBuffer(headerBytes) + while (headerReaderStream.notEnd()) { + const headerTypeName = extractHeaderType(headerReaderStream) headerReaderStream.read(1) // just read and ignore it. 
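+        // (the ignored byte is assumed to be the event-stream header-value type marker;
+        // the headers consumed here are all strings, so extractHeaderValue can read the
+        // 2-byte length and the value directly below)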
headers[headerTypeName] = extractHeaderValue(headerReaderStream) } } - let payloadStream + let payloadStream: ReadableBuffer const payLoadLength = totalMsgLength - headerLength - 16 if (payLoadLength > 0) { - const payLoadBuffer = Buffer.from(responseStream.read(payLoadLength)) - msgCrcAccumulator = crc32(payLoadBuffer, msgCrcAccumulator) + const payLoadBuffer = responseStream.read(payLoadLength) + msgCrcAccumulator = newCrc32.buf(payLoadBuffer, msgCrcAccumulator) // read the checksum early and detect any mismatch so we can avoid unnecessary further processing. - const messageCrcByteValue = Buffer.from(responseStream.read(4)).readInt32BE() - const calculatedCrc = msgCrcAccumulator.readInt32BE() + const messageCrcByteValue = responseStream.read(4).readInt32BE() + const calculatedCrc = msgCrcAccumulator // Handle message CRC Error if (messageCrcByteValue !== calculatedCrc) { throw new Error( `Message Checksum Mismatch, Message CRC of ${messageCrcByteValue} does not equal expected CRC of ${calculatedCrc}`, ) } - payloadStream = readableStream(payLoadBuffer) + payloadStream = new ReadableBuffer(payLoadBuffer) } const messageType = headers['message-type'] switch (messageType) { case 'error': { - const errorMessage = headers['error-code'] + ':"' + headers['error-message'] + '"' + const errorMessage = `${headers['error-code']}:"${headers['error-message']}"` throw new Error(errorMessage) } case 'event': { @@ -660,6 +785,8 @@ export function parseSelectObjectContentResponse(res) { } case 'Records': { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore const readData = payloadStream.read(payLoadLength) selectResults.setRecords(readData) break @@ -669,6 +796,8 @@ export function parseSelectObjectContentResponse(res) { { switch (contentType) { case 'text/xml': { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore const progressData = payloadStream.read(payLoadLength) selectResults.setProgress(progressData.toString()) break @@ -684,6 +813,8 @@ export function parseSelectObjectContentResponse(res) { { switch (contentType) { case 'text/xml': { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore const statsData = payloadStream.read(payLoadLength) selectResults.setStats(statsData.toString()) break @@ -706,4 +837,6 @@ export function parseSelectObjectContentResponse(res) { } // Event End } // messageType End } // Top Level Stream End + + throw new Error('unexpected end of stream') } diff --git a/tests/functional/functional-tests.js b/tests/functional/functional-tests.js index d0ab714b..e468cb11 100644 --- a/tests/functional/functional-tests.js +++ b/tests/functional/functional-tests.js @@ -23,30 +23,23 @@ import * as stream from 'node:stream' import * as url from 'node:url' import async from 'async' -import chai from 'chai' +import { assert } from 'chai' import _ from 'lodash' import { step } from 'mocha-steps' import splitFile from 'split-file' import superagent from 'superagent' import * as uuid from 'uuid' -import { AssumeRoleProvider } from '../../src/AssumeRoleProvider.js' -import { - CopyDestinationOptions, - CopySourceOptions, - DEFAULT_REGION, - getVersionId, - isArray, - removeDirAndFiles, -} from '../../src/helpers.js' -import * as minio from '../../src/minio.js' - -const assert = chai.assert +import { AssumeRoleProvider } from '../../src/AssumeRoleProvider.ts' +import { CopyDestinationOptions, CopySourceOptions, DEFAULT_REGION, getVersionId, isArray } from '../../src/helpers.ts' +import { removeDirAndFiles } from 
'../../src/helpers.ts' +import * as minio from '../../src/minio.ts' +import { Client } from '../../src/minio.ts' const isWindowsPlatform = process.platform === 'win32' describe('functional tests', function () { - this.timeout(30 * 60 * 1000) + this.timeout(10 * 60 * 1000) var clientConfigParams = {} var region_conf_env = process.env['MINIO_REGION'] @@ -75,7 +68,7 @@ describe('functional tests', function () { console.error(`Error: SECRET_KEY Environment variable is not set`) process.exit(1) } - clientConfigParams.useSSL = enable_https_env == '1' + clientConfigParams.useSSL = enable_https_env === '1' } else { // If credentials aren't given, default to play.min.io. clientConfigParams.endPoint = 'play.min.io' @@ -95,10 +88,10 @@ describe('functional tests', function () { // a directory with files to read from, i.e. /mint/data. var dataDir = process.env['MINT_DATA_DIR'] - var client = new minio.Client(clientConfigParams) + var client = new Client(clientConfigParams) var usEastConfig = clientConfigParams usEastConfig.region = server_region - var clientUsEastRegion = new minio.Client(usEastConfig) + var clientUsEastRegion = new Client(usEastConfig) var traceStream // FUNCTIONAL_TEST_TRACE env variable contains the path to which trace @@ -109,7 +102,9 @@ describe('functional tests', function () { if (trace_func_test_file_path === 'process.stdout') { traceStream = process.stdout } else { - traceStream = fs.createWriteStream(trace_func_test_file_path, { flags: 'a' }) + traceStream = fs.createWriteStream(trace_func_test_file_path, { + flags: 'a', + }) } traceStream.write('====================================\n') client.traceOn(traceStream) @@ -140,7 +135,9 @@ describe('functional tests', function () { var _5mbmd5 = crypto.createHash('md5').update(_5mb).digest('hex') // create new http agent to check requests release sockets - var httpAgent = (clientConfigParams.useSSL ? https : http).Agent({ keepAlive: true }) + var httpAgent = (clientConfigParams.useSSL ? 
https : http).Agent({ + keepAlive: true, + }) client.setRequestOptions({ agent: httpAgent }) var metaData = { 'Content-Type': 'text/html', @@ -338,7 +335,7 @@ describe('functional tests', function () { fs.writeFileSync(tmpFileUpload, _100kb) client.fPutObject(bucketName, _100kbObjectName, tmpFileUpload, done) }, - ) + ).timeout(5000) step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => { client.statObject(bucketName, _100kbObjectName, (e, stat) => { @@ -352,7 +349,7 @@ describe('functional tests', function () { } done() }) - }) + }).timeout(5000) var tmpFileUploadWithExt = `${tmpDir}/${_100kbObjectName}.txt` step( @@ -361,7 +358,7 @@ describe('functional tests', function () { fs.writeFileSync(tmpFileUploadWithExt, _100kb) client.fPutObject(bucketName, _100kbObjectName, tmpFileUploadWithExt, metaData, done) }, - ) + ).timeout(5000) step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => { client.statObject(bucketName, _100kbObjectName, (e, stat) => { @@ -377,7 +374,7 @@ describe('functional tests', function () { } done() }) - }) + }).timeout(5000) step( `fPutObject(bucketName, objectName, filePath, metaData, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}, filePath: ${tmpFileUploadWithExt}_`, @@ -385,7 +382,7 @@ describe('functional tests', function () { fs.writeFileSync(tmpFileUploadWithExt, _100kb) client.fPutObject(bucketName, _100kbObjectName, tmpFileUploadWithExt, done) }, - ) + ).timeout(5000) step(`statObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, (done) => { client.statObject(bucketName, _100kbObjectName, (e, stat) => { @@ -399,7 +396,7 @@ describe('functional tests', function () { } done() }) - }) + }).timeout(5000) step( `putObject(bucketName, objectName, stream, size, metaData, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}, stream:100kb, size:${_100kb.length}, metaData:${metaData}_`, @@ -407,7 +404,7 @@ describe('functional tests', function () { var stream = readableStream(_100kb) client.putObject(bucketName, _100kbObjectName, stream, _100kb.length, metaData, done) }, - ) + ).timeout(5000) step( `putObject(bucketName, objectName, stream, size, metaData, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}, stream:100kb, size:${_100kb.length}_`, @@ -415,7 +412,7 @@ describe('functional tests', function () { var stream = readableStream(_100kb) client.putObject(bucketName, _100kbObjectName, stream, _100kb.length, done) }, - ) + ).timeout(5000) step( `getObject(bucketName, objectName, callback)_bucketName:${bucketName}, objectName:${_100kbObjectName}_`, @@ -435,14 +432,14 @@ describe('functional tests', function () { }) }) }, - ) + ).timeout(5000) step( `putObject(bucketName, objectName, stream, callback)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, stream:100kb_`, (done) => { - client.putObject(bucketName, _100kbObjectBufferName, _100kb, '', done) + client.putObject(bucketName, _100kbObjectBufferName, _100kb, done) }, - ) + ).timeout(5000) step( `getObject(bucketName, objectName, callback)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}_`, @@ -462,7 +459,7 @@ describe('functional tests', function () { }) }) }, - ) + ).timeout(5000) step( `putObject(bucketName, objectName, stream, metaData)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, stream:100kb_, metaData:{}`, @@ -472,7 +469,7 @@ describe('functional tests', 
function () { .then(() => done()) .catch(done) }, - ) + ).timeout(5000) step( `getPartialObject(bucketName, objectName, offset, length, cb)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, offset:0, length=1024_`, @@ -485,7 +482,7 @@ describe('functional tests', function () { }) .catch(done) }, - ) + ).timeout(5000) step( `getPartialObject(bucketName, objectName, offset, length, cb)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, offset:1024, length=1024_`, @@ -505,7 +502,7 @@ describe('functional tests', function () { }) .catch(done) }, - ) + ).timeout(5000) step( `getPartialObject(bucketName, objectName, offset, length, cb)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}, offset:1024`, @@ -524,7 +521,7 @@ describe('functional tests', function () { }) .catch(done) }, - ) + ).timeout(5000) step( `getObject(bucketName, objectName)_bucketName:${bucketName}, objectName:${_100kbObjectBufferName}_`, @@ -537,7 +534,7 @@ describe('functional tests', function () { }) .catch(done) }, - ) + ).timeout(5000) step( `putObject(bucketName, objectName, stream, metadata, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, @@ -552,7 +549,7 @@ describe('functional tests', function () { }, 100) }) }, - ) + ).timeout(5000) step(`getObject(bucketName, objectName, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, (done) => { var hash = crypto.createHash('md5') @@ -836,7 +833,7 @@ describe('functional tests', function () { step( `initiateNewMultipartUpload(bucketName, objectName, metaData, cb)_bucketName:${bucketName}, objectName:${_65mbObjectName}, metaData:${metaData}`, (done) => { - client.initiateNewMultipartUpload(bucketName, _65mbObjectName, metaData, done) + client.initiateNewMultipartUpload(bucketName, _65mbObjectName, metaData).finally(done) }, ) step( @@ -946,15 +943,12 @@ describe('functional tests', function () { .then(() => done()) .catch(done) }, - ) + ).timeout(5000) step( `fPutObject(bucketName, objectName, filePath, metaData)_bucketName:${bucketName}, objectName:${_65mbObjectName}, filePath:${tmpFileUpload}_`, - (done) => { - client - .fPutObject(bucketName, _65mbObjectName, tmpFileUpload) - .then(() => done()) - .catch(done) + async () => { + await client.fPutObject(bucketName, _65mbObjectName, tmpFileUpload) }, ) @@ -966,7 +960,7 @@ describe('functional tests', function () { .then(() => done()) .catch(done) }, - ) + ).timeout(5000) step( `removeObject(bucketName, objectName, filePath, callback)_bucketName:${bucketName}, objectName:${_65mbObjectName}_`, @@ -975,7 +969,7 @@ describe('functional tests', function () { fs.unlinkSync(tmpFileDownload) client.removeObject(bucketName, _65mbObjectName, done) }, - ) + ).timeout(5000) }) describe('fGetObject-resume', () => { var localFile = `${tmpDir}/${_5mbObjectName}` @@ -1915,12 +1909,12 @@ describe('functional tests', function () { poller.removeAllListeners('notification') // clean up object now client.removeObject(bucketName, objectName, done) - }, 11 * 1000) + }, 10 * 1000) }) }, ) - }) - }) + }).timeout(120 * 1000) + }).timeout(120 * 1000) describe('Bucket Versioning API', () => { // Isolate the bucket/object for easy debugging and tracking. 
@@ -1952,7 +1946,7 @@ describe('functional tests', function () { }) }) - step('Suspend versioning on a bucket', (done) => { + step('Suspend versioning on a bucket', (done) => { client.setBucketVersioning(versionedBucketName, { Status: 'Suspended' }, (err) => { if (err && err.code === 'NotImplemented') { return done() @@ -2228,91 +2222,102 @@ describe('functional tests', function () { step( `putObject(bucketName, objectName, stream, size, metaData, callback)_bucketName:${versionedBucketName}, stream:1b, size:1_Create ${listObjectsNum} objects`, (done) => { - if (isVersioningSupported) { - let count = 1 - objVersionIdCounter.forEach(() => { - client.putObject( - versionedBucketName, - objNameWithPrefix, - readableStream(_1byte), - _1byte.length, - {}, - (e, data) => { - objArray.push(data) - if (count === objVersionIdCounter.length) { - done() - } - count += 1 - }, - ) - }) - } else { + if (!isVersioningSupported) { done() + return } + + let count = 1 + objVersionIdCounter.forEach(() => { + client.putObject( + versionedBucketName, + objNameWithPrefix, + readableStream(_1byte), + _1byte.length, + {}, + (e, data) => { + if (e) { + done(e) + } + objArray.push(data) + if (count === objVersionIdCounter.length) { + done() + } + count += 1 + }, + ) + }) }, ) step( `listObjects(bucketName, prefix, recursive)_bucketName:${versionedBucketName}, prefix: '', recursive:true_`, (done) => { - if (isVersioningSupported) { - client - .listObjects(versionedBucketName, '', true, { IncludeVersion: true }) - .on('error', done) - .on('end', () => { - if (_.isEqual(objArray.length, listPrefixArray.length)) { - return done() - } - return done(new Error(`listObjects lists ${listPrefixArray.length} objects, expected ${listObjectsNum}`)) - }) - .on('data', (data) => { - listPrefixArray.push(data) - }) - } else { + if (!isVersioningSupported) { done() + return } + + client + .listObjects(versionedBucketName, '', true, { + IncludeVersion: true, + }) + .on('error', done) + .on('end', () => { + if (_.isEqual(objArray.length, listPrefixArray.length)) { + return done() + } + return done(new Error(`listObjects lists ${listPrefixArray.length} objects, expected ${listObjectsNum}`)) + }) + .on('data', (data) => { + listPrefixArray.push(data) + }) }, ) step( `listObjects(bucketName, prefix, recursive)_bucketName:${versionedBucketName}, prefix: ${prefixName}, recursive:true_`, (done) => { - if (isVersioningSupported) { - listPrefixArray = [] - client - .listObjects(versionedBucketName, prefixName, true, { IncludeVersion: true }) - .on('error', done) - .on('end', () => { - if (_.isEqual(objArray.length, listPrefixArray.length)) { - return done() - } - return done(new Error(`listObjects lists ${listPrefixArray.length} objects, expected ${listObjectsNum}`)) - }) - .on('data', (data) => { - listPrefixArray.push(data) - }) - } else { + if (!isVersioningSupported) { done() + return } + + listPrefixArray = [] + client + .listObjects(versionedBucketName, prefixName, true, { + IncludeVersion: true, + }) + .on('error', done) + .on('end', () => { + if (_.isEqual(objArray.length, listPrefixArray.length)) { + return done() + } + return done(new Error(`listObjects lists ${listPrefixArray.length} objects, expected ${listObjectsNum}`)) + }) + .on('data', (data) => { + listPrefixArray.push(data) + }) }, ) step( `removeObject(bucketName, objectName, removeOpts)_bucketName:${versionedBucketName}_Remove ${listObjectsNum} objects`, (done) => { - if (isVersioningSupported) { - let count = 1 - listPrefixArray.forEach((item) => { - 
-            client.removeObject(versionedBucketName, item.name, { versionId: item.versionId }, () => {
-              if (count === listPrefixArray.length) {
-                done()
-              }
-              count += 1
-            })
-          })
-        } else {
+        if (!isVersioningSupported) {
           done()
+          return
         }
+
+        let count = 1
+        listPrefixArray.forEach((item) => {
+          client.removeObject(versionedBucketName, item.name, { versionId: item.versionId }, () => {
+            if (count === listPrefixArray.length) {
+              done()
+            }
+            count += 1
+          })
+        })
       },
     )
   })
@@ -2381,7 +2386,9 @@ describe('functional tests', function () {
       (done) => {
         if (isVersioningSupported) {
           client
-            .listObjects(versionedBucketName, '', true, { IncludeVersion: true })
+            .listObjects(versionedBucketName, '', true, {
+              IncludeVersion: true,
+            })
             .on('error', done)
             .on('end', () => {
               if (_.isEqual(2, objVersionList.length)) {
@@ -3169,7 +3176,10 @@ describe('functional tests', function () {
         client.removeObject(
           objRetentionBucket,
           retentionObjName,
-          { versionId: versionId, governanceBypass: true },
+          {
+            versionId: versionId,
+            governanceBypass: true,
+          },
           () => {
             done()
           },
@@ -3412,7 +3422,10 @@ describe('functional tests', function () {
         client.setObjectLegalHold(
           objLegalHoldBucketName,
           objLegalHoldObjName,
-          { status: 'ON', versionId: versionId },
+          {
+            status: 'ON',
+            versionId: versionId,
+          },
           () => {
             done()
           },
@@ -3443,7 +3456,10 @@ describe('functional tests', function () {
         client.setObjectLegalHold(
           objLegalHoldBucketName,
           objLegalHoldObjName,
-          { status: 'OFF', versionId: versionId },
+          {
+            status: 'OFF',
+            versionId: versionId,
+          },
           () => {
             done()
           },
@@ -3474,7 +3490,10 @@ describe('functional tests', function () {
         client.removeObject(
           objLegalHoldBucketName,
           objLegalHoldObjName,
-          { versionId: versionId, governanceBypass: true },
+          {
+            versionId: versionId,
+            governanceBypass: true,
+          },
           () => {
             done()
           },
@@ -3788,9 +3807,11 @@ describe('functional tests', function () {
       secretKey: client.secretKey,
     })

-    const aRoleConf = Object.assign({}, clientConfigParams, { credentialsProvider: assumeRoleProvider })
+    const aRoleConf = Object.assign({}, clientConfigParams, {
+      credentialsProvider: assumeRoleProvider,
+    })

-    const assumeRoleClient = new minio.Client(aRoleConf)
+    const assumeRoleClient = new Client(aRoleConf)
     assumeRoleClient.region = server_region

     describe('Put an Object', function () {
@@ -3950,7 +3971,9 @@ describe('functional tests', function () {
       (done) => {
         if (isVersioningSupported) {
           client
-            .removeObject(bucketToTestMultipart, _100kbObjectName, { versionId: versionedObjectRes.versionId })
+            .removeObject(bucketToTestMultipart, _100kbObjectName, {
+              versionId: versionedObjectRes.versionId,
+            })
             .then(() => done())
             .catch(done)
         } else {
@@ -3992,7 +4015,9 @@ describe('functional tests', function () {
       (done) => {
         if (isVersioningSupported) {
           client
-            .removeObject(bucketToTestMultipart, _65mbObjectName, { versionId: versionedMultiPartObjectRes.versionId })
+            .removeObject(bucketToTestMultipart, _65mbObjectName, {
+              versionId: versionedMultiPartObjectRes.versionId,
+            })
             .then(() => done())
             .catch(done)
         } else {
@@ -4277,7 +4302,8 @@ describe('functional tests', function () {
       },
     )
   })
-  describe('Test listIncompleteUploads (Multipart listing) with special characters', () => {
+  describe('Test listIncompleteUploads (Multipart listing) with special characters', function () {
+    this.timeout(30 * 1000)
     const specialCharPrefix = 'SpecialMenùäöüexPrefix/'
     const objectNameSpecialChars = 'äöüex.pdf'
     const spObjWithPrefix = `${specialCharPrefix}${objectNameSpecialChars}`
@@ -4289,7 +4315,7 @@ describe('functional tests', function () {
     step(
       `initiateNewMultipartUpload(bucketName, objectName, metaData, cb)_bucketName:${spBucketName}, objectName:${spObjWithPrefix}, metaData:${metaData}`,
       (done) => {
-        client.initiateNewMultipartUpload(spBucketName, spObjWithPrefix, metaData, done)
+        client.initiateNewMultipartUpload(spBucketName, spObjWithPrefix, metaData).finally(done)
       },
     )
@@ -4392,13 +4418,22 @@ describe('functional tests', function () {
       `selectObjectContent(bucketName, objectName, selectOpts)_bucketName:${selObjContentBucket}, objectName:${selObject}`,
       (done) => {
         const selectOpts = {
-          expression: 'SELECT * FROM s3object s where s."Name" = \'Jane\'',
+          expression: `SELECT * FROM s3object s where s."Name" = 'Jane'`,
           expressionType: 'SQL',
           inputSerialization: {
-            CSV: { FileHeaderInfo: 'Use', RecordDelimiter: '\n', FieldDelimiter: ',' },
+            CSV: {
+              FileHeaderInfo: 'Use',
+              RecordDelimiter: '\n',
+              FieldDelimiter: ',',
+            },
             CompressionType: 'NONE',
           },
-          outputSerialization: { CSV: { RecordDelimiter: '\n', FieldDelimiter: ',' } },
+          outputSerialization: {
+            CSV: {
+              RecordDelimiter: '\n',
+              FieldDelimiter: ',',
+            },
+          },
           requestProgress: { Enabled: true },
         }
@@ -4598,7 +4633,9 @@ describe('functional tests', function () {
       (done) => {
         if (isVersioningSupported) {
           client
-            .listObjects(fdPrefixBucketName, '/my-prefix', true, { IncludeVersion: true })
+            .listObjects(fdPrefixBucketName, '/my-prefix', true, {
+              IncludeVersion: true,
+            })
             .on('error', done)
             .on('end', () => {
               if (_.isEqual(0, objVersionList.length)) {
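Note on the functional-test hunks above: the recurring edit is a move off Node-style trailing callbacks. Methods such as initiateNewMultipartUpload and fPutObject are used through their promise API, so a step either bridges the promise back to mocha's done or becomes an async function that mocha awaits. A minimal sketch of the two equivalent shapes (step comes from mocha-steps as in these suites; the client, bucket, object, and file names are placeholders):

    // Callback-style: bridge the promise back to done().
    step('uploads via promise, reports via done', (done) => {
      client
        .fPutObject('a-bucket', 'an-object', '/tmp/a-file')
        .then(() => done())
        .catch(done) // hand any rejection to mocha
    })

    // Async-style: mocha awaits the returned promise directly.
    step('uploads via await', async () => {
      await client.fPutObject('a-bucket', 'an-object', '/tmp/a-file')
    })

The .finally(done) variant used for initiateNewMultipartUpload marks the step complete on success and failure alike, which appears acceptable there only because later steps assert on the resulting upload.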
diff --git a/tests/unit/test.js b/tests/unit/test.js
index ceaf4256..c199452d 100644
--- a/tests/unit/test.js
+++ b/tests/unit/test.js
@@ -14,7 +14,7 @@
  * limitations under the License.
  */

-import * as Stream from 'node:stream'
+import Stream from 'node:stream'

 import { assert } from 'chai'
 import Nock from 'nock'
@@ -29,8 +29,9 @@ import {
   makeDateLong,
   makeDateShort,
   partsRequired,
-} from '../../src/helpers.js'
-import * as Minio from '../../src/minio.js'
+} from '../../src/helpers.ts'
+import * as Minio from '../../src/minio.ts'
+import { Client } from '../../src/minio.ts'

 const Package = { version: 'development' }

@@ -243,7 +244,7 @@ describe('Client', function () {
       }
     })
   })
-  var client = new Minio.Client({
+  var client = new Client({
     endPoint: 'localhost',
     port: 9000,
     accessKey: 'accesskey',
@@ -252,7 +253,7 @@ describe('Client', function () {
   })
   describe('new client', () => {
     it('should work with https', () => {
-      var client = new Minio.Client({
+      var client = new Client({
         endPoint: 'localhost',
         accessKey: 'accesskey',
         secretKey: 'secretkey',
@@ -260,7 +261,7 @@ describe('Client', function () {
       assert.equal(client.port, 443)
     })
     it('should override port with http', () => {
-      var client = new Minio.Client({
+      var client = new Client({
         endPoint: 'localhost',
         port: 9000,
         accessKey: 'accesskey',
@@ -270,7 +271,7 @@ describe('Client', function () {
       assert.equal(client.port, 9000)
     })
     it('should work with http', () => {
-      var client = new Minio.Client({
+      var client = new Client({
         endPoint: 'localhost',
         accessKey: 'accesskey',
         secretKey: 'secretkey',
@@ -279,7 +280,7 @@ describe('Client', function () {
       assert.equal(client.port, 80)
     })
     it('should override port with https', () => {
-      var client = new Minio.Client({
+      var client = new Client({
         endPoint: 'localhost',
         port: 9000,
         accessKey: 'accesskey',
@@ -289,7 +290,7 @@ describe('Client', function () {
     })
     it('should fail with url', (done) => {
       try {
-        new Minio.Client({
+        new Client({
          endPoint: 'http://localhost:9000',
           accessKey: 'accesskey',
           secretKey: 'secretkey',
@@ -300,7 +301,7 @@ describe('Client', function () {
     })
     it('should fail with alphanumeric', (done) => {
       try {
-        new Minio.Client({
+        new Client({
           endPoint: 'localhost##$@3',
           accessKey: 'accesskey',
           secretKey: 'secretkey',
@@ -311,7 +312,7 @@ describe('Client', function () {
     })
     it('should fail with no url', (done) => {
       try {
-        new Minio.Client({
+        new Client({
           accessKey: 'accesskey',
           secretKey: 'secretkey',
         })
@@ -321,7 +322,7 @@ describe('Client', function () {
     })
     it('should fail with bad port', (done) => {
       try {
-        new Minio.Client({
+        new Client({
           endPoint: 'localhost',
           port: -1,
           accessKey: 'accesskey',
@@ -333,7 +334,7 @@ describe('Client', function () {
     })
     it('should fail when secure param is passed', (done) => {
       try {
-        new Minio.Client({
+        new Client({
           endPoint: 'localhost',
           secure: false,
           port: 9000,
@@ -346,7 +347,7 @@ describe('Client', function () {
     })
     it('should fail when secure param is passed', (done) => {
       try {
-        new Minio.Client({
+        new Client({
           endPoint: 'localhost',
           secure: true,
           port: 9000,
@@ -362,7 +363,7 @@ describe('Client', function () {
   describe('presigned-get', () => {
     it('should not generate presigned url with no access key', (done) => {
       try {
-        var client = new Minio.Client({
+        var client = new Client({
           endPoint: 'localhost',
           port: 9000,
           useSSL: false,
@@ -383,7 +384,7 @@ describe('Client', function () {
   describe('presigned-put', () => {
     it('should not generate presigned url with no access key', (done) => {
       try {
-        var client = new Minio.Client({
+        var client = new Client({
           endPoint: 'localhost',
           port: 9000,
           useSSL: false,
@@ -436,7 +437,7 @@ describe('Client', function () {
   })
   describe('User Agent', () => {
     it('should have a default user agent', () => {
-      var client = new Minio.Client({
+      var client = new Client({
         endPoint: 'localhost',
         accessKey: 'accesskey',
         secretKey: 'secretkey',
@@ -444,7 +445,7 @@ describe('Client', function () {
       assert.equal(`MinIO (${process.platform}; ${process.arch}) minio-js/${Package.version}`, client.userAgent)
     })
     it('should set user agent', () => {
-      var client = new Minio.Client({
+      var client = new Client({
         endPoint: 'localhost',
         accessKey: 'accesskey',
         secretKey: 'secretkey',
@@ -456,7 +457,7 @@ describe('Client', function () {
       )
     })
     it('should set user agent without comments', () => {
-      var client = new Minio.Client({
+      var client = new Client({
         endPoint: 'localhost',
         accessKey: 'accesskey',
         secretKey: 'secretkey',
@@ -469,7 +470,7 @@ describe('Client', function () {
     })
     it('should not set user agent without name', (done) => {
       try {
-        var client = new Minio.Client({
+        var client = new Client({
           endPoint: 'localhost',
           accessKey: 'accesskey',
           secretKey: 'secretkey',
@@ -481,7 +482,7 @@ describe('Client', function () {
     })
     it('should not set user agent with empty name', (done) => {
       try {
-        var client = new Minio.Client({
+        var client = new Client({
           endPoint: 'localhost',
           accessKey: 'accesskey',
           secretKey: 'secretkey',
@@ -493,7 +494,7 @@ describe('Client', function () {
     })
     it('should not set user agent without version', (done) => {
       try {
-        var client = new Minio.Client({
+        var client = new Client({
           endPoint: 'localhost',
           accessKey: 'accesskey',
           secretKey: 'secretkey',
@@ -505,7 +506,7 @@ describe('Client', function () {
     })
     it('should not set user agent with empty version', (done) => {
       try {
-        var client = new Minio.Client({
+        var client = new Client({
           endPoint: 'localhost',
           accessKey: 'accesskey',
           secretKey: 'secretkey',
@@ -960,40 +961,6 @@ describe('Client', function () {
       }
     })
   })
-  describe('Put Object Tags', () => {
-    it('should fail on null object', (done) => {
-      try {
-        client.putObjectTagging('my-bucket-name', null, {}, function () {})
-      } catch (e) {
-        done()
-      }
-    })
-    it('should fail on empty object', (done) => {
-      try {
-        client.putObjectTagging('my-bucket-name', null, {}, function () {})
-      } catch (e) {
-        done()
-      }
-    })
-    it('should fail on non object tags', (done) => {
-      try {
-        client.putObjectTagging('my-bucket-name', null, 'non-obj-tag', function () {})
-      } catch (e) {
-        done()
-      }
-    })
-    it('should fail if tags are more than 50 on an object', (done) => {
-      const _50_plus_key_tags = {}
-      for (let i = 0; i < 51; i += 1) {
-        _50_plus_key_tags[i] = i
-      }
-      try {
-        client.putObjectTagging('my-bucket-name', null, _50_plus_key_tags, function () {})
-      } catch (e) {
-        done()
-      }
-    })
-  })
   describe('Get Object Tags', () => {
     it('should fail on invalid bucket', (done) => {
       try {
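Note on the unit-test churn above: it is mechanical. Client is now also a named export of the entry module, so new Minio.Client({...}) and new Client({...}) construct the same class, and the tests simply switch to the named import. A small sketch (paths as used in the tests; the option values are the test fixtures' placeholders):

    import * as Minio from '../../src/minio.ts' // namespace import still works
    import { Client } from '../../src/minio.ts' // named import used by the updated tests

    const viaNamespace = new Minio.Client({ endPoint: 'localhost', port: 9000, useSSL: false, accessKey: 'accesskey', secretKey: 'secretkey' })
    const viaNamed = new Client({ endPoint: 'localhost', port: 9000, useSSL: false, accessKey: 'accesskey', secretKey: 'secretkey' })
    // Both construct the same class: Minio.Client === Client

The removed Put Object Tags cases exercised putObjectTagging, which the deleted declaration file below already noted does not exist in the library source; the Get Object Tags cases remain.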
ItemBucketMetadataList -} - -export interface BucketItemStat { - size: number - etag: string - lastModified: Date - metaData: ItemBucketMetadata -} - -export interface IncompleteUploadedBucketItem { - key: string - uploadId: string - size: number -} - -export interface BucketStream extends ReadableStream { - on(event: 'data', listener: (item: T) => void): this - - on(event: 'end' | 'pause' | 'readable' | 'resume' | 'close', listener: () => void): this - - on(event: 'error', listener: (err: Error) => void): this - - on(event: string | symbol, listener: (...args: any[]) => void): this -} - -export interface PostPolicyResult { - postURL: string - formData: { - [key: string]: any - } -} - -export interface MetadataItem { - Key: string - Value: string -} - -export interface ItemBucketMetadataList { - Items: MetadataItem[] -} - -export interface ItemBucketMetadata { - [key: string]: any -} - -export interface UploadedObjectInfo { - etag: string - versionId: string | null -} - -export interface Tag { - Key: string - Value: string -} - -export interface LifecycleConfig { - Rule: LifecycleRule[] -} - -export interface LifecycleRule { - [key: string]: any -} - -export interface LockConfig { - mode: Mode - unit: LockUnit - validity: number -} - -export interface EncryptionConfig { - Rule: EncryptionRule[] -} - -export interface EncryptionRule { - [key: string]: any -} - -export interface ReplicationConfig { - role: string - rules: [] -} - -export interface ReplicationConfig { - [key: string]: any -} - -export interface RetentionOptions { - versionId: string - mode?: Mode - retainUntilDate?: IsoDate - governanceBypass?: boolean -} - -export interface LegalHoldOptions { - versionId: string - status: LegalHoldStatus -} - -export interface InputSerialization { - CompressionType?: 'NONE' | 'GZIP' | 'BZIP2' - CSV?: { - AllowQuotedRecordDelimiter?: boolean - Comments?: string - FieldDelimiter?: string - FileHeaderInfo?: 'NONE' | 'IGNORE' | 'USE' - QuoteCharacter?: string - QuoteEscapeCharacter?: string - RecordDelimiter?: string - } - JSON?: { - Type: 'DOCUMENT' | 'LINES' - } - Parquet?: EmptyObject -} - -export interface OutputSerialization { - CSV?: { - FieldDelimiter?: string - QuoteCharacter?: string - QuoteEscapeCharacter?: string - QuoteFields?: string - RecordDelimiter?: string - } - JSON?: { - RecordDelimiter?: string - } -} - -export interface SelectOptions { - expression: string - expressionType?: string - inputSerialization: InputSerialization - outputSerialization: OutputSerialization - requestProgress?: { Enabled: boolean } - scanRange?: { Start: number; End: number } -} - -export interface SourceObjectStats { - size: number - metaData: string - lastModicied: Date - versionId: string - etag: string -} - -// No need to export this. But without it - linter error. 
-export class TargetConfig {
-  setId(id: any): void
-
-  addEvent(newEvent: any): void
-
-  addFilterSuffix(suffix: any): void
-
-  addFilterPrefix(prefix: any): void
-}
-
-export interface MakeBucketOpt {
-  ObjectLocking: boolean
-}
-
-export interface RemoveOptions {
-  versionId?: string
-  governanceBypass?: boolean
-}
-
-// Exports from library
-export class Client {
-  constructor(options: ClientOptions)
-
-  // Bucket operations
-  makeBucket(bucketName: string, region: Region, makeOpts: MakeBucketOpt, callback: NoResultCallback): void
-  makeBucket(bucketName: string, region: Region, callback: NoResultCallback): void
-  makeBucket(bucketName: string, callback: NoResultCallback): void
-  makeBucket(bucketName: string, region?: Region, makeOpts?: MakeBucketOpt): Promise<void>
-
-  listBuckets(callback: ResultCallback<BucketItemFromList[]>): void
-  listBuckets(): Promise<BucketItemFromList[]>
-
-  bucketExists(bucketName: string, callback: ResultCallback<boolean>): void
-  bucketExists(bucketName: string): Promise<boolean>
-
-  removeBucket(bucketName: string, callback: NoResultCallback): void
-  removeBucket(bucketName: string): Promise<void>
-
-  listObjects(bucketName: string, prefix?: string, recursive?: boolean): BucketStream<BucketItem>
-
-  listObjectsV2(bucketName: string, prefix?: string, recursive?: boolean, startAfter?: string): BucketStream<BucketItem>
-
-  listIncompleteUploads(
-    bucketName: string,
-    prefix?: string,
-    recursive?: boolean,
-  ): BucketStream<IncompleteUploadedBucketItem>
-
-  getBucketVersioning(bucketName: string, callback: ResultCallback<VersioningConfig>): void
-  getBucketVersioning(bucketName: string): Promise<VersioningConfig>
-
-  setBucketVersioning(bucketName: string, versioningConfig: any, callback: NoResultCallback): void
-  setBucketVersioning(bucketName: string, versioningConfig: any): Promise<void>
-
-  getBucketTagging(bucketName: string, callback: ResultCallback<Tag[]>): void
-  getBucketTagging(bucketName: string): Promise<Tag[]>
-
-  setBucketTagging(bucketName: string, tags: TagList, callback: NoResultCallback): void
-  setBucketTagging(bucketName: string, tags: TagList): Promise<void>
-
-  removeBucketTagging(bucketName: string, callback: NoResultCallback): void
-  removeBucketTagging(bucketName: string): Promise<void>
-
-  setBucketLifecycle(bucketName: string, lifecycleConfig: Lifecycle, callback: NoResultCallback): void
-  setBucketLifecycle(bucketName: string, lifecycleConfig: Lifecycle): Promise<void>
-
-  getBucketLifecycle(bucketName: string, callback: ResultCallback<Lifecycle>): void
-  getBucketLifecycle(bucketName: string): Promise<Lifecycle>
-
-  removeBucketLifecycle(bucketName: string, callback: NoResultCallback): void
-  removeBucketLifecycle(bucketName: string): Promise<void>
-
-  setObjectLockConfig(bucketName: string, callback: NoResultCallback): void
-  setObjectLockConfig(bucketName: string, lockConfig: Lock, callback: NoResultCallback): void
-  setObjectLockConfig(bucketName: string, lockConfig?: Lock): Promise<void>
-
-  getObjectLockConfig(bucketName: string, callback: ResultCallback<Lock>): void
-  getObjectLockConfig(bucketName: string): Promise<Lock>
-
-  getBucketEncryption(bucketName: string, callback: ResultCallback<Encryption>): void
-  getBucketEncryption(bucketName: string): Promise<Encryption>
-
-  setBucketEncryption(bucketName: string, encryptionConfig: Encryption, callback: NoResultCallback): void
-  setBucketEncryption(bucketName: string, encryptionConfig: Encryption): Promise<void>
-
-  removeBucketEncryption(bucketName: string, callback: NoResultCallback): void
-  removeBucketEncryption(bucketName: string): Promise<void>
-
-  setBucketReplication(bucketName: string, replicationConfig: ReplicationConfig, callback: NoResultCallback): void
-  setBucketReplication(bucketName: string, replicationConfig: ReplicationConfig): Promise<void>
-
-  getBucketReplication(bucketName: string, callback: ResultCallback<ReplicationConfig>): void
-  getBucketReplication(bucketName: string): Promise<ReplicationConfig>
-
-  removeBucketReplication(bucketName: string, callback: NoResultCallback): void
-  removeBucketReplication(bucketName: string): Promise<void>
-
-  // Object operations
-  getObject(bucketName: string, objectName: string, callback: ResultCallback<ReadableStream>): void
-  getObject(bucketName: string, objectName: string): Promise<ReadableStream>
-
-  getPartialObject(
-    bucketName: string,
-    objectName: string,
-    offset: number,
-    callback: ResultCallback<ReadableStream>,
-  ): void
-  getPartialObject(
-    bucketName: string,
-    objectName: string,
-    offset: number,
-    length: number,
-    callback: ResultCallback<ReadableStream>,
-  ): void
-  getPartialObject(bucketName: string, objectName: string, offset: number, length?: number): Promise<ReadableStream>
-
-  fGetObject(bucketName: string, objectName: string, filePath: string, callback: NoResultCallback): void
-  fGetObject(bucketName: string, objectName: string, filePath: string): Promise<void>
-
-  putObject(
-    bucketName: string,
-    objectName: string,
-    stream: ReadableStream | Buffer | string,
-    callback: ResultCallback<UploadedObjectInfo>,
-  ): void
-  putObject(
-    bucketName: string,
-    objectName: string,
-    stream: ReadableStream | Buffer | string,
-    size: number,
-    callback: ResultCallback<UploadedObjectInfo>,
-  ): void
-  putObject(
-    bucketName: string,
-    objectName: string,
-    stream: ReadableStream | Buffer | string,
-    size: number,
-    metaData: ItemBucketMetadata,
-    callback: ResultCallback<UploadedObjectInfo>,
-  ): void
-  putObject(
-    bucketName: string,
-    objectName: string,
-    stream: ReadableStream | Buffer | string,
-    size?: number,
-    metaData?: ItemBucketMetadata,
-  ): Promise<UploadedObjectInfo>
-  putObject(
-    bucketName: string,
-    objectName: string,
-    stream: ReadableStream | Buffer | string,
-    metaData?: ItemBucketMetadata,
-  ): Promise<UploadedObjectInfo>
-
-  fPutObject(
-    bucketName: string,
-    objectName: string,
-    filePath: string,
-    metaData: ItemBucketMetadata,
-    callback: ResultCallback<UploadedObjectInfo>,
-  ): void
-  fPutObject(
-    bucketName: string,
-    objectName: string,
-    filePath: string,
-    metaData?: ItemBucketMetadata,
-  ): Promise<UploadedObjectInfo>
-
-  copyObject(
-    bucketName: string,
-    objectName: string,
-    sourceObject: string,
-    conditions: CopyConditions,
-    callback: ResultCallback<BucketItemCopy>,
-  ): void
-  copyObject(
-    bucketName: string,
-    objectName: string,
-    sourceObject: string,
-    conditions: CopyConditions,
-  ): Promise<BucketItemCopy>
-
-  statObject(bucketName: string, objectName: string, callback: ResultCallback<BucketItemStat>): void
-  statObject(bucketName: string, objectName: string): Promise<BucketItemStat>
-
-  removeObject(bucketName: string, objectName: string, removeOpts: RemoveOptions, callback: NoResultCallback): void
-  removeObject(bucketName: string, objectName: string, callback: NoResultCallback): void
-  removeObject(bucketName: string, objectName: string, removeOpts?: RemoveOptions): Promise<void>
-
-  removeObjects(bucketName: string, objectsList: string[], callback: NoResultCallback): void
-  removeObjects(bucketName: string, objectsList: string[]): Promise<void>
-
-  removeIncompleteUpload(bucketName: string, objectName: string, callback: NoResultCallback): void
-  removeIncompleteUpload(bucketName: string, objectName: string): Promise<void>
-
-  putObjectRetention(bucketName: string, objectName: string, callback: NoResultCallback): void
-  putObjectRetention(
-    bucketName: string,
-    objectName: string,
-    retentionOptions: Retention,
-    callback: NoResultCallback,
-  ): void
-  putObjectRetention(bucketName: string, objectName: string, retentionOptions?: Retention): Promise<void>
-
-  getObjectRetention(
-    bucketName: string,
-    objectName: string,
-    options: VersionIdentificator,
-    callback: ResultCallback<Retention>,
-  ): void
-  getObjectRetention(bucketName: string, objectName: string, options: VersionIdentificator): Promise<Retention>
-
-  // It seems, putObjectTagging is deprecated in favor or setObjectTagging - there is no such a method in the library source code
-  /**
-   * @deprecated Use setObjectTagging instead.
-   */
-  putObjectTagging(bucketName: string, objectName: string, tags: TagList, callback: NoResultCallback): void
-  /**
-   * @deprecated Use setObjectTagging instead.
-   */
-  putObjectTagging(
-    bucketName: string,
-    objectName: string,
-    tags: TagList,
-    putOptions: VersionIdentificator,
-    callback: NoResultCallback,
-  ): void
-  /**
-   * @deprecated Use setObjectTagging instead.
-   */
-  putObjectTagging(
-    bucketName: string,
-    objectName: string,
-    tags: TagList,
-    putOptions?: VersionIdentificator,
-  ): Promise<void>
-
-  setObjectTagging(bucketName: string, objectName: string, tags: TagList, callback: NoResultCallback): void
-  setObjectTagging(
-    bucketName: string,
-    objectName: string,
-    tags: TagList,
-    putOptions: VersionIdentificator,
-    callback: NoResultCallback,
-  ): void
-  setObjectTagging(
-    bucketName: string,
-    objectName: string,
-    tags: TagList,
-    putOptions?: VersionIdentificator,
-  ): Promise<void>
-
-  removeObjectTagging(bucketName: string, objectName: string, callback: NoResultCallback): void
-  removeObjectTagging(
-    bucketName: string,
-    objectName: string,
-    removeOptions: VersionIdentificator,
-    callback: NoResultCallback,
-  ): void
-  removeObjectTagging(bucketName: string, objectName: string, removeOptions?: VersionIdentificator): Promise<void>
-
-  getObjectTagging(bucketName: string, objectName: string, callback: ResultCallback<Tag[]>): void
-  getObjectTagging(
-    bucketName: string,
-    objectName: string,
-    getOptions: VersionIdentificator,
-    callback: ResultCallback<Tag[]>,
-  ): void
-  getObjectTagging(bucketName: string, objectName: string, getOptions?: VersionIdentificator): Promise<Tag[]>
-
-  getObjectLegalHold(bucketName: string, objectName: string, callback: ResultCallback<LegalHoldOptions>): void
-  getObjectLegalHold(
-    bucketName: string,
-    objectName: string,
-    getOptions: VersionIdentificator,
-    callback: ResultCallback<LegalHoldOptions>,
-  ): void
-  getObjectLegalHold(
-    bucketName: string,
-    objectName: string,
-    getOptions?: VersionIdentificator,
-  ): Promise<LegalHoldOptions>
-
-  setObjectLegalHold(bucketName: string, objectName: string, callback: NoResultCallback): void
-  setObjectLegalHold(
-    bucketName: string,
-    objectName: string,
-    setOptions: LegalHoldOptions,
-    callback: NoResultCallback,
-  ): void
-  setObjectLegalHold(bucketName: string, objectName: string, setOptions?: LegalHoldOptions): Promise<void>
-
-  composeObject(
-    destObjConfig: CopyDestinationOptions,
-    sourceObjList: CopySourceOptions[],
-    callback: ResultCallback<SourceObjectStats>,
-  ): void
-  composeObject(destObjConfig: CopyDestinationOptions, sourceObjList: CopySourceOptions[]): Promise<SourceObjectStats>
-
-  selectObjectContent(
-    bucketName: string,
-    objectName: string,
-    selectOpts: SelectOptions,
-    callback: NoResultCallback,
-  ): void
-  selectObjectContent(bucketName: string, objectName: string, selectOpts: SelectOptions): Promise<void>
-
-  // Presigned operations
-  presignedUrl(httpMethod: string, bucketName: string, objectName: string, callback: ResultCallback<string>): void
-  presignedUrl(
-    httpMethod: string,
-    bucketName: string,
-    objectName: string,
-    expiry: number,
-    callback: ResultCallback<string>,
-  ): void
-  presignedUrl(
-    httpMethod: string,
-    bucketName: string,
-    objectName: string,
-    expiry: number,
-    reqParams: { [key: string]: any },
-    callback: ResultCallback<string>,
-  ): void
-  presignedUrl(
-    httpMethod: string,
-    bucketName: string,
-    objectName: string,
-    expiry: number,
-    reqParams: { [key: string]: any },
-    requestDate: Date,
-    callback: ResultCallback<string>,
-  ): void
-  presignedUrl(
-    httpMethod: string,
-    bucketName: string,
-    objectName: string,
-    expiry?: number,
-    reqParams?: { [key: string]: any },
-    requestDate?: Date,
-  ): Promise<string>
-
-  presignedGetObject(bucketName: string, objectName: string, callback: ResultCallback<string>): void
-  presignedGetObject(bucketName: string, objectName: string, expiry: number, callback: ResultCallback<string>): void
-  presignedGetObject(
-    bucketName: string,
-    objectName: string,
-    expiry: number,
-    respHeaders: { [key: string]: any },
-    callback: ResultCallback<string>,
-  ): void
-  presignedGetObject(
-    bucketName: string,
-    objectName: string,
-    expiry: number,
-    respHeaders: { [key: string]: any },
-    requestDate: Date,
-    callback: ResultCallback<string>,
-  ): void
-  presignedGetObject(
-    bucketName: string,
-    objectName: string,
-    expiry?: number,
-    respHeaders?: { [key: string]: any },
-    requestDate?: Date,
-  ): Promise<string>
-
-  presignedPutObject(bucketName: string, objectName: string, callback: ResultCallback<string>): void
-  presignedPutObject(bucketName: string, objectName: string, expiry: number, callback: ResultCallback<string>): void
-  presignedPutObject(bucketName: string, objectName: string, expiry?: number): Promise<string>
-
-  presignedPostPolicy(policy: PostPolicy, callback: ResultCallback<PostPolicyResult>): void
-  presignedPostPolicy(policy: PostPolicy): Promise<PostPolicyResult>
-
-  // Bucket Policy & Notification operations
-  getBucketNotification(bucketName: string, callback: ResultCallback<NotificationConfig>): void
-  getBucketNotification(bucketName: string): Promise<NotificationConfig>
-
-  setBucketNotification(
-    bucketName: string,
-    bucketNotificationConfig: NotificationConfig,
-    callback: NoResultCallback,
-  ): void
-  setBucketNotification(bucketName: string, bucketNotificationConfig: NotificationConfig): Promise<void>
-
-  removeAllBucketNotification(bucketName: string, callback: NoResultCallback): void
-  removeAllBucketNotification(bucketName: string): Promise<void>
-
-  getBucketPolicy(bucketName: string, callback: ResultCallback<string>): void
-  getBucketPolicy(bucketName: string): Promise<string>
-
-  setBucketPolicy(bucketName: string, bucketPolicy: string, callback: NoResultCallback): void
-  setBucketPolicy(bucketName: string, bucketPolicy: string): Promise<void>
-
-  listenBucketNotification(
-    bucketName: string,
-    prefix: string,
-    suffix: string,
-    events: NotificationEvent[],
-  ): NotificationPoller
-
-  // Custom Settings
-  setS3TransferAccelerate(endpoint: string): void
-
-  // Other
-  newPostPolicy(): PostPolicy
-
-  setRequestOptions(options: RequestOptions): void
-
-  // Minio extensions that aren't necessary present for Amazon S3 compatible storage servers
-  extensions: {
-    listObjectsV2WithMetadata(
-      bucketName: string,
-      prefix?: string,
-      recursive?: boolean,
-      startAfter?: string,
-    ): BucketStream<BucketItemWithMetadata>
-  }
-}
-
-export namespace Policy {
-  const NONE: 'none'
-  const READONLY: 'readonly'
-  const WRITEONLY: 'writeonly'
-  const READWRITE: 'readwrite'
-}
-
-export class CopyConditions {
-  setModified(date: Date): void
-
-  setUnmodified(date: Date): void
-
-  setMatchETag(etag: string): void
-
-  setMatchETagExcept(etag: string): void
-}
-
-export class PostPolicy {
-  setExpires(date: Date): void
-
-  setKey(objectName: string): void
-
-  setKeyStartsWith(prefix: string): void
-
-  setBucket(bucketName: string): void
-
-  setContentType(type: string): void
-
-  setContentTypeStartsWith(prefix: string): void
-
-  setContentLengthRange(min: number, max: number): void
-
-  setContentDisposition(disposition: string): void
-
-  setUserMetaData(metadata: Record<string, string>): void
-}
-
-export class NotificationPoller extends EventEmitter {
-  stop(): void
-
-  start(): void
-
-  // must to be public?
-  checkForChanges(): void
-}
-
-export class NotificationConfig {
-  add(target: TopicConfig | QueueConfig | CloudFunctionConfig): void
-}
-
-export class TopicConfig extends TargetConfig {
-  constructor(arn: string)
-}
-
-export class QueueConfig extends TargetConfig {
-  constructor(arn: string)
-}
-
-export class CloudFunctionConfig extends TargetConfig {
-  constructor(arn: string)
-}
-
-export class CopySourceOptions {
-  constructor(options: {
-    Bucket: string
-    Object: string
-    VersionID?: string
-    MatchETag?: string
-    NoMatchETag?: string
-    MatchModifiedSince?: string
-    MatchUnmodifiedSince?: string
-    MatchRange?: boolean
-    Start?: number
-    End?: number
-    Encryption?: {
-      type: string
-      SSEAlgorithm?: string
-      KMSMasterKeyID?: string
-    }
-  })
-
-  getHeaders(): Record<string, string>
-
-  validate(): boolean
-}
-
-export class CopyDestinationOptions {
-  constructor(options: {
-    Bucket: string
-    Object: string
-    Encryption?: {
-      type: string
-      SSEAlgorithm?: string
-      KMSMasterKeyID?: string
-    }
-    UserMetadata?: Record<string, string>
-    UserTags?: Record<string, string> | string
-    LegalHold?: LegalHoldStatus
-    RetainUntilDate?: string
-    Mode?: Mode
-  })
-
-  getHeaders(): Record<string, string>
-
-  validate(): boolean
-}
-
-export function buildARN(
-  partition: string,
-  service: string,
-  region: string,
-  accountId: string,
-  resource: string,
-): string
-
-export const ObjectCreatedAll: NotificationEvent // s3:ObjectCreated:*'
-export const ObjectCreatedPut: NotificationEvent // s3:ObjectCreated:Put
-export const ObjectCreatedPost: NotificationEvent // s3:ObjectCreated:Post
-export const ObjectCreatedCopy: NotificationEvent // s3:ObjectCreated:Copy
-export const ObjectCreatedCompleteMultipartUpload: NotificationEvent // s3:ObjectCreated:CompleteMultipartUpload
-export const ObjectRemovedAll: NotificationEvent // s3:ObjectRemoved:*
-export const ObjectRemovedDelete: NotificationEvent // s3:ObjectRemoved:Delete
-export const ObjectRemovedDeleteMarkerCreated: NotificationEvent // s3:ObjectRemoved:DeleteMarkerCreated
-export const ObjectReducedRedundancyLostObject: NotificationEvent // s3:ReducedRedundancyLostObject
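Note on the deletion above: the new blob hash e69de29b is git's empty blob, so types/minio.d.ts is emptied rather than removed from the tree, and the declarations are expected to come from the TypeScript sources going forward. A hedged sketch of consumer code exercising the dual callback/promise surface those declarations described (bucket and object names and the versionId are placeholders):

    import { Client } from 'minio'

    const client = new Client({ endPoint: 'localhost', port: 9000, useSSL: false, accessKey: 'accesskey', secretKey: 'secretkey' })

    // Promise form, per removeObject(bucketName, objectName, removeOpts?): Promise<void>
    async function removeOneVersion() {
      await client.removeObject('my-bucket', 'my-object', { versionId: 'abc123', governanceBypass: true })
    }

    // Callback form, per removeObject(bucketName, objectName, callback): void
    client.removeObject('my-bucket', 'my-object', (err) => {
      if (err) {
        console.error(err)
      }
    })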