https://project.mdnd-it.cc/work_packages/94
This commit is contained in:
2025-08-23 04:25:28 +02:00
parent 725516ad6c
commit 19cfa031d0
25823 changed files with 1095587 additions and 2801760 deletions
+262
View File
@@ -0,0 +1,262 @@
import * as http from 'node:http'
import * as https from 'node:https'
import { URL, URLSearchParams } from 'node:url'
import { CredentialProvider } from './CredentialProvider.ts'
import { Credentials } from './Credentials.ts'
import { makeDateLong, parseXml, toSha256 } from './internal/helper.ts'
import { request } from './internal/request.ts'
import { readAsString } from './internal/response.ts'
import type { Transport } from './internal/type.ts'
import { signV4ByServiceName } from './signing.ts'
/**
 * Shape of the parsed STS AssumeRole XML response.
 * @see https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html
 */
type CredentialResponse = {
  // Present only when STS rejected the request.
  ErrorResponse?: {
    Error?: {
      Code?: string
      Message?: string
    }
  }
  AssumeRoleResponse: {
    AssumeRoleResult: {
      // Temporary credentials issued by STS.
      Credentials: {
        AccessKeyId: string
        SecretAccessKey: string
        SessionToken: string
        // Timestamp string after which the credentials stop working.
        Expiration: string
      }
    }
  }
}
/** Constructor options for AssumeRoleProvider. */
export interface AssumeRoleProviderOptions {
  /** Full URL of the STS-compatible endpoint to call */
  stsEndpoint: string
  /** Access key used to sign the AssumeRole request */
  accessKey: string
  /** Secret key used to sign the AssumeRole request */
  secretKey: string
  /** Requested credential lifetime in seconds (default 900) */
  durationSeconds?: number
  /** Session token forwarded to the base CredentialProvider */
  sessionToken?: string
  /** Policy document sent as the `Policy` request parameter */
  policy?: string
  /** Signing region (defaults to '') */
  region?: string
  /** ARN of the role to assume, sent as `RoleArn` */
  roleArn?: string
  /** Identifier for the assumed-role session, sent as `RoleSessionName` */
  roleSessionName?: string
  /** Sent as the `ExternalId` request parameter */
  externalId?: string
  /** Sent as the `Token` request parameter */
  token?: string
  /** Sent as the `WebIdentityToken` request parameter */
  webIdentityToken?: string
  /** STS action to invoke (defaults to 'AssumeRole') */
  action?: string
  /** Custom http(s) agent; when undefined nodejs uses its global agent */
  transportAgent?: http.Agent
}
// Default requested credential lifetime in seconds; also used as the lower
// bound when clamping (see AssumeRoleProvider's constructor) — 900s is the
// documented STS minimum for DurationSeconds.
const defaultExpirySeconds = 900
/**
 * Credential provider that obtains temporary credentials from an
 * STS-compatible endpoint via the AssumeRole API, caching them and
 * refreshing shortly before they expire.
 */
export class AssumeRoleProvider extends CredentialProvider {
  private readonly stsEndpoint: URL
  private readonly accessKey: string
  private readonly secretKey: string
  private readonly durationSeconds: number
  private readonly policy?: string
  private readonly region: string
  private readonly roleArn?: string
  private readonly roleSessionName?: string
  private readonly externalId?: string
  private readonly token?: string
  private readonly webIdentityToken?: string
  private readonly action: string

  private _credentials: Credentials | null
  // Effective lifetime sent as DurationSeconds; used for calculating refresh of credentials.
  private readonly expirySeconds: number
  // Expiration timestamp string of the last credentials received from STS.
  private accessExpiresAt = ''
  private readonly transportAgent?: http.Agent

  private readonly transport: Transport

  constructor({
    stsEndpoint,
    accessKey,
    secretKey,
    durationSeconds = defaultExpirySeconds,
    sessionToken,
    policy,
    region = '',
    roleArn,
    roleSessionName,
    externalId,
    token,
    webIdentityToken,
    action = 'AssumeRole',
    transportAgent = undefined,
  }: AssumeRoleProviderOptions) {
    super({ accessKey, secretKey, sessionToken })

    this.stsEndpoint = new URL(stsEndpoint)
    this.accessKey = accessKey
    this.secretKey = secretKey
    this.policy = policy
    this.region = region
    this.roleArn = roleArn
    this.roleSessionName = roleSessionName
    this.externalId = externalId
    this.token = token
    this.webIdentityToken = webIdentityToken
    this.action = action

    // Coerce to an integer without the previous `as unknown as string`
    // type-lie; untyped JS callers may still pass strings at runtime.
    const parsedDuration = parseInt(String(durationSeconds), 10)
    // Fall back to the default when unparseable so we never send "NaN" as
    // DurationSeconds, then clamp to the minimum STS accepts.
    this.durationSeconds = Number.isNaN(parsedDuration) ? defaultExpirySeconds : parsedDuration
    this.expirySeconds = Math.max(this.durationSeconds, defaultExpirySeconds) // for calculating refresh of credentials.

    // By default, nodejs uses a global agent if the 'agent' property
    // is set to undefined. Otherwise, it's okay to assume the users
    // know what they're doing if they specify a custom transport agent.
    this.transportAgent = transportAgent
    this.transport = this.stsEndpoint.protocol === 'http:' ? http : https

    /**
     * Internal Tracking variables
     */
    this._credentials = null
  }

  /**
   * Build the signed POST request for the STS call.
   * @returns request options plus the form-encoded body to send.
   */
  getRequestConfig(): {
    requestOptions: http.RequestOptions
    requestData: string
  } {
    const hostValue = this.stsEndpoint.hostname
    const portValue = this.stsEndpoint.port

    const qryParams = new URLSearchParams({ Action: this.action, Version: '2011-06-15' })
    qryParams.set('DurationSeconds', this.expirySeconds.toString())
    if (this.policy) {
      qryParams.set('Policy', this.policy)
    }
    if (this.roleArn) {
      qryParams.set('RoleArn', this.roleArn)
    }
    if (this.roleSessionName != null) {
      qryParams.set('RoleSessionName', this.roleSessionName)
    }
    if (this.token != null) {
      qryParams.set('Token', this.token)
    }
    if (this.webIdentityToken) {
      qryParams.set('WebIdentityToken', this.webIdentityToken)
    }
    if (this.externalId) {
      qryParams.set('ExternalId', this.externalId)
    }
    const urlParams = qryParams.toString()

    const contentSha256 = toSha256(urlParams)
    const date = new Date()

    const requestOptions = {
      hostname: hostValue,
      port: portValue,
      path: '/',
      protocol: this.stsEndpoint.protocol,
      method: 'POST',
      headers: {
        'Content-Type': 'application/x-www-form-urlencoded',
        // Byte length, not UTF-16 code-unit count. (URLSearchParams
        // percent-encodes non-ASCII, so the two coincide today, but
        // Buffer.byteLength states the intent correctly.)
        'content-length': Buffer.byteLength(urlParams).toString(),
        host: hostValue,
        'x-amz-date': makeDateLong(date),
        'x-amz-content-sha256': contentSha256,
      } as Record<string, string>,
      agent: this.transportAgent,
    } satisfies http.RequestOptions

    // Sign last so the signature covers all headers set above.
    requestOptions.headers.authorization = signV4ByServiceName(
      requestOptions,
      this.accessKey,
      this.secretKey,
      this.region,
      date,
      contentSha256,
      'sts',
    )

    return {
      requestOptions,
      requestData: urlParams,
    }
  }

  /** Execute the STS request and return the parsed XML response. */
  async performRequest(): Promise<CredentialResponse> {
    const { requestOptions, requestData } = this.getRequestConfig()
    const res = await request(this.transport, requestOptions, requestData)
    const body = await readAsString(res)
    return parseXml(body)
  }

  /**
   * Extract the credentials from an AssumeRole response, recording their
   * expiration for the refresh check.
   * @throws Error when the response carries an ErrorResponse element.
   */
  parseCredentials(respObj: CredentialResponse): Credentials {
    if (respObj.ErrorResponse) {
      throw new Error(
        `Unable to obtain credentials: ${respObj.ErrorResponse?.Error?.Code} ${respObj.ErrorResponse?.Error?.Message}`,
        { cause: respObj },
      )
    }
    const {
      AssumeRoleResponse: {
        AssumeRoleResult: {
          Credentials: {
            AccessKeyId: accessKey,
            SecretAccessKey: secretKey,
            SessionToken: sessionToken,
            Expiration: expiresAt,
          },
        },
      },
    } = respObj
    this.accessExpiresAt = expiresAt
    return new Credentials({ accessKey, secretKey, sessionToken })
  }

  /** Unconditionally fetch fresh credentials from STS and cache them. */
  async refreshCredentials(): Promise<Credentials> {
    try {
      const assumeRoleCredentials = await this.performRequest()
      this._credentials = this.parseCredentials(assumeRoleCredentials)
    } catch (err) {
      throw new Error(`Failed to get Credentials: ${err}`, { cause: err })
    }
    return this._credentials
  }

  /** Return cached credentials, refreshing them when missing or near expiry. */
  async getCredentials(): Promise<Credentials> {
    if (this._credentials && !this.isAboutToExpire()) {
      return this._credentials
    }
    this._credentials = await this.refreshCredentials()
    return this._credentials
  }

  /** True when the cached credentials expire within the next 10 seconds. */
  isAboutToExpire() {
    const expiresAt = new Date(this.accessExpiresAt)
    const provisionalExpiry = new Date(Date.now() + 1000 * 10) // refresh 10 seconds ahead of expiry
    return provisionalExpiry > expiresAt
  }
}

// deprecated default export, please use named exports.
// keep for backward compatibility.
// eslint-disable-next-line import/no-default-export
export default AssumeRoleProvider
+54
View File
@@ -0,0 +1,54 @@
import { Credentials } from './Credentials.ts'
/**
 * Base credential provider: owns a Credentials instance and exposes
 * pass-through accessors for its individual fields. Subclasses (such as
 * the STS-backed providers) override getCredentials() to refresh on demand.
 */
export class CredentialProvider {
  private credentials: Credentials

  constructor(opts: { accessKey: string; secretKey: string; sessionToken?: string }) {
    this.credentials = new Credentials({
      accessKey: opts.accessKey,
      secretKey: opts.secretKey,
      sessionToken: opts.sessionToken,
    })
  }

  /** Resolve the currently held credentials. */
  async getCredentials(): Promise<Credentials> {
    return this.credentials.get()
  }

  /** Replace the held credentials; rejects anything that is not a Credentials instance. */
  setCredentials(credentials: Credentials) {
    if (!(credentials instanceof Credentials)) {
      throw new Error('Unable to set Credentials. it should be an instance of Credentials class')
    }
    this.credentials = credentials
  }

  setAccessKey(accessKey: string) {
    this.credentials.setAccessKey(accessKey)
  }

  getAccessKey() {
    return this.credentials.getAccessKey()
  }

  setSecretKey(secretKey: string) {
    this.credentials.setSecretKey(secretKey)
  }

  getSecretKey() {
    return this.credentials.getSecretKey()
  }

  setSessionToken(sessionToken: string) {
    this.credentials.setSessionToken(sessionToken)
  }

  getSessionToken() {
    return this.credentials.getSessionToken()
  }
}

// deprecated default export, please use named exports.
// keep for backward compatibility.
// eslint-disable-next-line import/no-default-export
export default CredentialProvider
+44
View File
@@ -0,0 +1,44 @@
/**
 * Plain value holder for an S3 access/secret key pair plus an optional
 * session token (used with temporary credentials).
 */
export class Credentials {
  public accessKey: string
  public secretKey: string
  public sessionToken?: string

  constructor(opts: { accessKey: string; secretKey: string; sessionToken?: string }) {
    this.accessKey = opts.accessKey
    this.secretKey = opts.secretKey
    this.sessionToken = opts.sessionToken
  }

  setAccessKey(accessKey: string) {
    this.accessKey = accessKey
  }

  getAccessKey() {
    return this.accessKey
  }

  setSecretKey(secretKey: string) {
    this.secretKey = secretKey
  }

  getSecretKey() {
    return this.secretKey
  }

  setSessionToken(sessionToken: string) {
    this.sessionToken = sessionToken
  }

  getSessionToken() {
    return this.sessionToken
  }

  /** Return this instance (legacy accessor kept for API compatibility). */
  get(): Credentials {
    return this
  }
}

// deprecated default export, please use named exports.
// keep for backward compatibility.
// eslint-disable-next-line import/no-default-export
export default Credentials
+234
View File
@@ -0,0 +1,234 @@
import * as fs from 'node:fs/promises'
import * as http from 'node:http'
import * as https from 'node:https'
import { URL, URLSearchParams } from 'node:url'
import { CredentialProvider } from './CredentialProvider.ts'
import { Credentials } from './Credentials.ts'
import { parseXml } from './internal/helper.ts'
import { request } from './internal/request.ts'
import { readAsString } from './internal/response.ts'
/** Parsed XML response of the STS AssumeRoleWithWebIdentity call. */
interface AssumeRoleResponse {
  AssumeRoleWithWebIdentityResponse: {
    AssumeRoleWithWebIdentityResult: {
      // Temporary credentials issued by STS.
      Credentials: {
        AccessKeyId: string
        SecretAccessKey: string
        SessionToken: string
        // Timestamp string after which these credentials expire.
        Expiration: string
      }
    }
  }
}
/** JSON credential document returned by the ECS/EC2 container credential endpoints. */
interface EcsCredentials {
  AccessKeyID: string
  SecretAccessKey: string
  Token: string
  Expiration: string
  // 'Success' on success; any other value indicates failure (see Message).
  Code: string
  Message: string
}
/** Constructor options for IamAwsProvider. */
export interface IamAwsProviderOptions {
  /** Override for the instance-metadata endpoint (default http://169.254.169.254) */
  customEndpoint?: string
  /** Custom http(s) agent used for metadata and STS requests */
  transportAgent?: http.Agent
}
/**
 * Credential provider for AWS-style environments. Resolves credentials
 * from, in order: an IRSA web-identity token file, the ECS/EKS container
 * credential endpoints, or the EC2 instance metadata service (IMDSv2).
 * Credentials are cached and re-fetched shortly before expiry.
 */
export class IamAwsProvider extends CredentialProvider {
  private readonly customEndpoint?: string

  private _credentials: Credentials | null
  private readonly transportAgent?: http.Agent
  // Expiration timestamp string of the most recently fetched credentials.
  private accessExpiresAt = ''

  constructor({ customEndpoint = undefined, transportAgent = undefined }: IamAwsProviderOptions) {
    super({ accessKey: '', secretKey: '' })
    this.customEndpoint = customEndpoint
    this.transportAgent = transportAgent

    /**
     * Internal Tracking variables
     */
    this._credentials = null
  }

  /** Return cached credentials, fetching fresh ones when absent or near expiry. */
  async getCredentials(): Promise<Credentials> {
    if (!this._credentials || this.isAboutToExpire()) {
      this._credentials = await this.fetchCredentials()
    }
    return this._credentials
  }

  /** Try each supported credential source in priority order. */
  private async fetchCredentials(): Promise<Credentials> {
    try {
      // check for IRSA (https://docs.aws.amazon.com/eks/latest/userguide/iam-roles-for-service-accounts.html)
      const tokenFile = process.env.AWS_WEB_IDENTITY_TOKEN_FILE
      if (tokenFile) {
        return await this.fetchCredentialsUsingTokenFile(tokenFile)
      }

      // try with IAM role for EC2 instances (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html)
      let tokenHeader = 'Authorization'
      let token = process.env.AWS_CONTAINER_AUTHORIZATION_TOKEN
      const relativeUri = process.env.AWS_CONTAINER_CREDENTIALS_RELATIVE_URI
      const fullUri = process.env.AWS_CONTAINER_CREDENTIALS_FULL_URI
      let url: URL
      if (relativeUri) {
        // ECS task credential endpoint (fixed link-local address).
        url = new URL(relativeUri, 'http://169.254.170.2')
      } else if (fullUri) {
        url = new URL(fullUri)
      } else {
        // Fall back to IMDSv2: acquire a session token, then resolve the role URL.
        token = await this.fetchImdsToken()
        tokenHeader = 'X-aws-ec2-metadata-token'
        url = await this.getIamRoleNamedUrl(token)
      }

      return this.requestCredentials(url, tokenHeader, token)
    } catch (err) {
      throw new Error(`Failed to get Credentials: ${err}`, { cause: err })
    }
  }

  /** Exchange a web-identity token file (IRSA) for STS credentials. */
  private async fetchCredentialsUsingTokenFile(tokenFile: string): Promise<Credentials> {
    const token = await fs.readFile(tokenFile, { encoding: 'utf8' })
    const region = process.env.AWS_REGION
    const stsEndpoint = new URL(region ? `https://sts.${region}.amazonaws.com` : 'https://sts.amazonaws.com')
    const hostValue = stsEndpoint.hostname
    const portValue = stsEndpoint.port
    const qryParams = new URLSearchParams({
      Action: 'AssumeRoleWithWebIdentity',
      Version: '2011-06-15',
    })
    const roleArn = process.env.AWS_ROLE_ARN
    if (roleArn) {
      qryParams.set('RoleArn', roleArn)
      const roleSessionName = process.env.AWS_ROLE_SESSION_NAME
      // Default the session name to a timestamp when none is configured.
      qryParams.set('RoleSessionName', roleSessionName ? roleSessionName : Date.now().toString())
    }
    qryParams.set('WebIdentityToken', token)
    qryParams.sort()

    const requestOptions = {
      hostname: hostValue,
      port: portValue,
      path: `${stsEndpoint.pathname}?${qryParams.toString()}`,
      protocol: stsEndpoint.protocol,
      method: 'POST',
      headers: {},
      agent: this.transportAgent,
    } satisfies http.RequestOptions

    const transport = stsEndpoint.protocol === 'http:' ? http : https
    const res = await request(transport, requestOptions, null)
    const body = await readAsString(res)
    const assumeRoleResponse: AssumeRoleResponse = parseXml(body)
    const creds = assumeRoleResponse.AssumeRoleWithWebIdentityResponse.AssumeRoleWithWebIdentityResult.Credentials
    this.accessExpiresAt = creds.Expiration
    return new Credentials({
      accessKey: creds.AccessKeyId,
      secretKey: creds.SecretAccessKey,
      sessionToken: creds.SessionToken,
    })
  }

  /** Obtain an IMDSv2 session token (PUT /latest/api/token). */
  private async fetchImdsToken() {
    const endpoint = this.customEndpoint ? this.customEndpoint : 'http://169.254.169.254'
    const url = new URL('/latest/api/token', endpoint)
    const requestOptions = {
      hostname: url.hostname,
      port: url.port,
      path: `${url.pathname}${url.search}`,
      protocol: url.protocol,
      method: 'PUT',
      headers: {
        'X-aws-ec2-metadata-token-ttl-seconds': '21600',
      },
      agent: this.transportAgent,
    } satisfies http.RequestOptions
    const transport = url.protocol === 'http:' ? http : https
    const res = await request(transport, requestOptions, null)
    return await readAsString(res)
  }

  /** Build the metadata URL of the IAM role attached to this instance. */
  private async getIamRoleNamedUrl(token: string) {
    const endpoint = this.customEndpoint ? this.customEndpoint : 'http://169.254.169.254'
    // Leading slash keeps the path absolute regardless of any path on a
    // custom endpoint, matching fetchImdsToken above.
    const url = new URL('/latest/meta-data/iam/security-credentials/', endpoint)
    const roleName = await this.getIamRoleName(url, token)
    // Strip the trailing slash before appending so the resulting path does
    // not contain '//' (the base path above ends with '/').
    return new URL(`${url.pathname.replace(/\/+$/, '')}/${encodeURIComponent(roleName)}`, url.origin)
  }

  /** Fetch the list of attached IAM role names and return the first one. */
  private async getIamRoleName(url: URL, token: string): Promise<string> {
    const requestOptions = {
      hostname: url.hostname,
      port: url.port,
      path: `${url.pathname}${url.search}`,
      protocol: url.protocol,
      method: 'GET',
      headers: {
        'X-aws-ec2-metadata-token': token,
      },
      agent: this.transportAgent,
    } satisfies http.RequestOptions
    const transport = url.protocol === 'http:' ? http : https
    const res = await request(transport, requestOptions, null)
    const body = await readAsString(res)
    // Drop blank lines: String.split always yields at least one element, so
    // without this filter an empty body would slip past the check below and
    // produce an empty role name.
    const roleNames = body.split(/\r\n|[\n\r\u2028\u2029]/).filter((name) => name.trim().length > 0)
    if (roleNames.length === 0) {
      throw new Error(`No IAM roles attached to EC2 service ${url}`)
    }
    return roleNames[0] as string
  }

  /** GET the credential document from `url` and convert it into Credentials. */
  private async requestCredentials(url: URL, tokenHeader: string, token: string | undefined): Promise<Credentials> {
    const headers: Record<string, string> = {}
    if (token) {
      headers[tokenHeader] = token
    }
    const requestOptions = {
      hostname: url.hostname,
      port: url.port,
      path: `${url.pathname}${url.search}`,
      protocol: url.protocol,
      method: 'GET',
      headers: headers,
      agent: this.transportAgent,
    } satisfies http.RequestOptions
    const transport = url.protocol === 'http:' ? http : https
    const res = await request(transport, requestOptions, null)
    const body = await readAsString(res)
    const ecsCredentials = JSON.parse(body) as EcsCredentials
    // Strict comparison; a missing Code field also fails this check.
    if (ecsCredentials.Code !== 'Success') {
      throw new Error(`${url} failed with code ${ecsCredentials.Code} and message ${ecsCredentials.Message}`)
    }
    this.accessExpiresAt = ecsCredentials.Expiration
    return new Credentials({
      accessKey: ecsCredentials.AccessKeyID,
      secretKey: ecsCredentials.SecretAccessKey,
      sessionToken: ecsCredentials.Token,
    })
  }

  /** True when the cached credentials expire within the next 10 seconds. */
  private isAboutToExpire() {
    const expiresAt = new Date(this.accessExpiresAt)
    const provisionalExpiry = new Date(Date.now() + 1000 * 10) // 10 seconds leeway
    return provisionalExpiry > expiresAt
  }
}

// deprecated default export, please use named exports.
// keep for backward compatibility.
// eslint-disable-next-line import/no-default-export
export default IamAwsProvider
+120
View File
@@ -0,0 +1,120 @@
/*
* MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <reference lib="ES2022.Error" />
/**
 * Common base for every error thrown by this library. Forwards the
 * optional ErrorOptions (e.g. `cause`) to Error and sets `name` to the
 * concrete subclass name, which would otherwise always read 'Error'.
 */
class ExtendableError extends Error {
  constructor(message?: string, opt?: ErrorOptions) {
    // ErrorOptions {cause?: unknown} is a 'nice to have'; not relied upon internally.
    super(message, opt)
    this.name = this.constructor.name
  }
}
/**
 * AnonymousRequestError is generated for anonymous keys on specific
 * APIs. NOTE: PresignedURL generation always requires access keys.
 */
export class AnonymousRequestError extends ExtendableError {}
/**
 * InvalidArgumentError is generated for all invalid arguments.
 */
export class InvalidArgumentError extends ExtendableError {}
/**
 * InvalidPortError is generated when a non integer value is provided
 * for ports.
 */
export class InvalidPortError extends ExtendableError {}
/**
 * InvalidEndpointError is generated when an invalid end point value is
 * provided which does not follow domain standards.
 */
export class InvalidEndpointError extends ExtendableError {}
/**
 * InvalidBucketNameError is generated when an invalid bucket name is
 * provided which does not follow AWS S3 specifications.
 * http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html
 */
export class InvalidBucketNameError extends ExtendableError {}
/**
 * InvalidObjectNameError is generated when an invalid object name is
 * provided which does not follow AWS S3 specifications.
 * http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html
 */
export class InvalidObjectNameError extends ExtendableError {}
/**
 * AccessKeyRequiredError generated by signature methods when access
 * key is not found.
 */
export class AccessKeyRequiredError extends ExtendableError {}
/**
 * SecretKeyRequiredError generated by signature methods when secret
 * key is not found.
 */
export class SecretKeyRequiredError extends ExtendableError {}
/**
 * ExpiresParamError generated when expires parameter value is not
 * well within stipulated limits.
 */
export class ExpiresParamError extends ExtendableError {}
/**
 * InvalidDateError generated when invalid date is found.
 */
export class InvalidDateError extends ExtendableError {}
/**
 * InvalidPrefixError generated when object prefix provided is invalid
 * or does not conform to AWS S3 object key restrictions.
 */
export class InvalidPrefixError extends ExtendableError {}
/**
 * InvalidBucketPolicyError generated when the given bucket policy is invalid.
 */
export class InvalidBucketPolicyError extends ExtendableError {}
/**
 * IncorrectSizeError generated when total data read mismatches with
 * the input size.
 */
export class IncorrectSizeError extends ExtendableError {}
/**
 * InvalidXMLError generated when an unknown XML is found.
 */
export class InvalidXMLError extends ExtendableError {}
/**
 * S3Error is generated for errors returned from S3 server.
 * see getErrorTransformer for details
 */
export class S3Error extends ExtendableError {
  // Error code reported by the server (see getErrorTransformer).
  code?: string
  // Region reported by the server, when present.
  region?: string
}
/**
 * IsValidBucketNameError is generated when a bucket name fails validation.
 * NOTE(review): overlaps with InvalidBucketNameError above — confirm which
 * one callers are expected to catch.
 */
export class IsValidBucketNameError extends ExtendableError {}
+354
View File
@@ -0,0 +1,354 @@
import * as fs from 'node:fs'
import * as path from 'node:path'
import * as querystring from 'query-string'
import * as errors from './errors.ts'
import {
getEncryptionHeaders,
isEmpty,
isEmptyObject,
isNumber,
isObject,
isString,
isValidBucketName,
isValidObjectName,
} from './internal/helper.ts'
import type { Encryption, ObjectMetaData, RequestHeaders } from './internal/type.ts'
import { RETENTION_MODES } from './internal/type.ts'
export { ENCRYPTION_TYPES, LEGAL_HOLD_STATUS, RETENTION_MODES, RETENTION_VALIDITY_UNITS } from './internal/type.ts'
// Region assumed when none is configured.
export const DEFAULT_REGION = 'us-east-1'
// NOTE(review): despite 'DAYS' in the name, the value is in SECONDS (7 days).
export const PRESIGN_EXPIRY_DAYS_MAX = 24 * 60 * 60 * 7 // 7 days in seconds
/** Options describing the source object of a server-side copy. */
export interface ICopySourceOptions {
  /** Source bucket name */
  Bucket: string
  /** Source object name */
  Object: string
  /**
   * Valid versionId
   */
  VersionID?: string
  /**
   * Etag to match
   */
  MatchETag?: string
  /**
   * Etag to exclude
   */
  NoMatchETag?: string
  /**
   * Modified Date of the object/part. UTC Date in string format
   */
  MatchModifiedSince?: string | null
  /**
   * Modified Date of the object/part to exclude UTC Date in string format
   */
  MatchUnmodifiedSince?: string | null
  /**
   * true or false Object range to match
   */
  MatchRange?: boolean
  /** Range start (used when MatchRange is true) */
  Start?: number
  /** Range end (used when MatchRange is true) */
  End?: number
  /** Server-side encryption configuration of the source */
  Encryption?: Encryption
}
/**
 * Describes the source object of a server-side copy and renders the
 * matching `x-amz-copy-source*` request headers.
 */
export class CopySourceOptions {
  public readonly Bucket: string
  public readonly Object: string
  public readonly VersionID: string
  public MatchETag: string
  private readonly NoMatchETag: string
  private readonly MatchModifiedSince: string | null
  private readonly MatchUnmodifiedSince: string | null
  public readonly MatchRange: boolean
  public readonly Start: number
  public readonly End: number
  private readonly Encryption?: Encryption

  constructor({
    Bucket,
    Object,
    VersionID = '',
    MatchETag = '',
    NoMatchETag = '',
    MatchModifiedSince = null,
    MatchUnmodifiedSince = null,
    MatchRange = false,
    Start = 0,
    End = 0,
    Encryption = undefined,
  }: ICopySourceOptions) {
    this.Bucket = Bucket
    this.Object = Object
    this.VersionID = VersionID
    this.MatchETag = MatchETag
    this.NoMatchETag = NoMatchETag
    this.MatchModifiedSince = MatchModifiedSince
    this.MatchUnmodifiedSince = MatchUnmodifiedSince
    this.MatchRange = MatchRange
    this.Start = Start
    this.End = End
    this.Encryption = Encryption
  }

  /**
   * Validate the bucket/object names and the optional byte range.
   * @returns true when valid
   * @throws errors.InvalidBucketNameError | errors.InvalidObjectNameError
   */
  validate() {
    if (!isValidBucketName(this.Bucket)) {
      throw new errors.InvalidBucketNameError('Invalid Source bucket name: ' + this.Bucket)
    }
    if (!isValidObjectName(this.Object)) {
      throw new errors.InvalidObjectNameError(`Invalid Source object name: ${this.Object}`)
    }
    // Type-check Start/End first so the range comparison below operates on
    // numbers. (The previous grouping applied the isNumber(End) check even
    // when MatchRange was false, raising a misleading error.)
    if (this.MatchRange && (!isNumber(this.Start) || !isNumber(this.End))) {
      throw new errors.InvalidObjectNameError(
        'MatchRange is specified. But Invalid Start and End values are specified.',
      )
    }
    if ((this.MatchRange && this.Start !== -1 && this.End !== -1 && this.Start > this.End) || this.Start < 0) {
      throw new errors.InvalidObjectNameError('Source start must be non-negative, and start must be at most end.')
    }
    return true
  }

  /** Render the `x-amz-copy-source*` headers for this source. */
  getHeaders(): RequestHeaders {
    const headerOptions: RequestHeaders = {}
    headerOptions['x-amz-copy-source'] = encodeURI(this.Bucket + '/' + this.Object)
    if (!isEmpty(this.VersionID)) {
      headerOptions['x-amz-copy-source'] = `${encodeURI(this.Bucket + '/' + this.Object)}?versionId=${this.VersionID}`
    }
    if (!isEmpty(this.MatchETag)) {
      headerOptions['x-amz-copy-source-if-match'] = this.MatchETag
    }
    if (!isEmpty(this.NoMatchETag)) {
      headerOptions['x-amz-copy-source-if-none-match'] = this.NoMatchETag
    }
    if (!isEmpty(this.MatchModifiedSince)) {
      headerOptions['x-amz-copy-source-if-modified-since'] = this.MatchModifiedSince
    }
    if (!isEmpty(this.MatchUnmodifiedSince)) {
      headerOptions['x-amz-copy-source-if-unmodified-since'] = this.MatchUnmodifiedSince
    }
    return headerOptions
  }
}
/**
 * Recursively delete the contents of `dirPath`.
 *
 * @deprecated use nodejs fs module
 * @param dirPath directory to clear
 * @param removeSelf when true (default) the directory itself is removed too;
 *   when false only its children are removed and the empty directory remains
 */
export function removeDirAndFiles(dirPath: string, removeSelf = true) {
  if (!removeSelf) {
    // Delete each child entry but keep the directory itself.
    for (const entry of fs.readdirSync(dirPath)) {
      fs.rmSync(path.join(dirPath, entry), { recursive: true, force: true })
    }
    return
  }
  return fs.rmSync(dirPath, { recursive: true, force: true })
}
export interface ICopyDestinationOptions {
  /**
   * Bucket name
   */
  Bucket: string
  /**
   * Object Name for the destination (composed/copied) object defaults
   */
  Object: string
  /**
   * Encryption configuration defaults to {}
   * @default {}
   */
  Encryption?: Encryption
  /** User-defined metadata, emitted as X-Amz-Meta-* headers */
  UserMetadata?: ObjectMetaData
  /**
   * query-string encoded string or Record<string, string> Object
   */
  UserTags?: Record<string, string> | string
  /** Object legal-hold status */
  LegalHold?: 'on' | 'off'
  /**
   * UTC Date String
   */
  RetainUntilDate?: string
  /** Retention mode (GOVERNANCE or COMPLIANCE) */
  Mode?: RETENTION_MODES
  /** COPY keeps source metadata; REPLACE uses the metadata supplied here */
  MetadataDirective?: 'COPY' | 'REPLACE'
  /**
   * Extra headers for the target object
   */
  Headers?: Record<string, string>
}
/**
 * Describes the destination of a server-side copy/compose operation and
 * renders the matching request headers.
 */
export class CopyDestinationOptions {
  public readonly Bucket: string
  public readonly Object: string
  private readonly Encryption?: Encryption
  private readonly UserMetadata?: ObjectMetaData
  private readonly UserTags?: Record<string, string> | string
  private readonly LegalHold?: 'on' | 'off'
  private readonly RetainUntilDate?: string
  private readonly Mode?: RETENTION_MODES
  private readonly MetadataDirective?: string
  private readonly Headers?: Record<string, string>
  constructor({
    Bucket,
    Object,
    Encryption,
    UserMetadata,
    UserTags,
    LegalHold,
    RetainUntilDate,
    Mode,
    MetadataDirective,
    Headers,
  }: ICopyDestinationOptions) {
    this.Bucket = Bucket
    this.Object = Object
    this.Encryption = Encryption ?? undefined // null input will become undefined, easy for runtime assert
    this.UserMetadata = UserMetadata
    this.UserTags = UserTags
    this.LegalHold = LegalHold
    this.Mode = Mode // retention mode
    this.RetainUntilDate = RetainUntilDate
    this.MetadataDirective = MetadataDirective
    this.Headers = Headers
  }
  /**
   * Render the destination-side headers: tagging, object-lock settings,
   * user metadata, encryption, and any extra caller-supplied headers.
   */
  getHeaders(): RequestHeaders {
    const replaceDirective = 'REPLACE'
    const headerOptions: RequestHeaders = {}
    const userTags = this.UserTags
    if (!isEmpty(userTags)) {
      // Tags always use the REPLACE directive; accept either a record
      // (stringified) or a pre-encoded query string.
      headerOptions['X-Amz-Tagging-Directive'] = replaceDirective
      headerOptions['X-Amz-Tagging'] = isObject(userTags)
        ? querystring.stringify(userTags)
        : isString(userTags)
          ? userTags
          : ''
    }
    if (this.Mode) {
      headerOptions['X-Amz-Object-Lock-Mode'] = this.Mode // GOVERNANCE or COMPLIANCE
    }
    if (this.RetainUntilDate) {
      headerOptions['X-Amz-Object-Lock-Retain-Until-Date'] = this.RetainUntilDate // needs to be UTC.
    }
    if (this.LegalHold) {
      headerOptions['X-Amz-Object-Lock-Legal-Hold'] = this.LegalHold // ON or OFF
    }
    if (this.UserMetadata) {
      for (const [key, value] of Object.entries(this.UserMetadata)) {
        headerOptions[`X-Amz-Meta-${key}`] = value.toString()
      }
    }
    if (this.MetadataDirective) {
      headerOptions[`X-Amz-Metadata-Directive`] = this.MetadataDirective
    }
    if (this.Encryption) {
      const encryptionHeaders = getEncryptionHeaders(this.Encryption)
      for (const [key, value] of Object.entries(encryptionHeaders)) {
        headerOptions[key] = value
      }
    }
    if (this.Headers) {
      for (const [key, value] of Object.entries(this.Headers)) {
        headerOptions[key] = value
      }
    }
    return headerOptions
  }
  /**
   * Validate the names and option combinations.
   * @returns true when valid
   * @throws errors.InvalidBucketNameError | errors.InvalidObjectNameError
   */
  validate() {
    if (!isValidBucketName(this.Bucket)) {
      throw new errors.InvalidBucketNameError('Invalid Destination bucket name: ' + this.Bucket)
    }
    if (!isValidObjectName(this.Object)) {
      throw new errors.InvalidObjectNameError(`Invalid Destination object name: ${this.Object}`)
    }
    if (!isEmpty(this.UserMetadata) && !isObject(this.UserMetadata)) {
      throw new errors.InvalidObjectNameError(`Destination UserMetadata should be an object with key value pairs`)
    }
    if (!isEmpty(this.Mode) && ![RETENTION_MODES.GOVERNANCE, RETENTION_MODES.COMPLIANCE].includes(this.Mode)) {
      throw new errors.InvalidObjectNameError(
        `Invalid Mode specified for destination object it should be one of [GOVERNANCE,COMPLIANCE]`,
      )
    }
    if (this.Encryption !== undefined && isEmptyObject(this.Encryption)) {
      throw new errors.InvalidObjectNameError(`Invalid Encryption configuration for destination object `)
    }
    return true
  }
}
/**
 * Holds the individual pieces of a SelectObjectContent response.
 * (Maybe this should be a generic type for Records; left for a later refactor.)
 */
export class SelectResults {
  private records?: unknown
  private response?: unknown
  private stats?: string
  private progress?: unknown

  constructor(opts: {
    /** parsed data as stream */
    records?: unknown
    /** original response stream */
    response?: unknown
    /** stats as xml */
    stats?: string
    /** progress as xml */
    progress?: unknown
  }) {
    this.records = opts.records
    this.response = opts.response
    this.stats = opts.stats
    this.progress = opts.progress
  }

  setStats(stats: string) {
    this.stats = stats
  }

  getStats() {
    return this.stats
  }

  setProgress(progress: unknown) {
    this.progress = progress
  }

  getProgress() {
    return this.progress
  }

  setResponse(response: unknown) {
    this.response = response
  }

  getResponse() {
    return this.response
  }

  setRecords(records: unknown) {
    this.records = records
  }

  getRecords(): unknown {
    return this.records
  }
}
+14
View File
@@ -0,0 +1,14 @@
// promise helper for stdlib
import * as fs from 'node:fs'
import * as stream from 'node:stream'
import { promisify } from 'node:util'
// TODO: use "node:fs/promise" directly after we stop testing on nodejs 12
export { promises as fsp } from 'node:fs'
// Promisified stream helpers.
export const streamPromise = {
  // node:stream/promises Added in: v15.0.0
  pipeline: promisify(stream.pipeline),
}
// Promisified fs.fstat: resolves with the Stats for an open file descriptor.
export const fstat = promisify(fs.fstat)
+18
View File
@@ -0,0 +1,18 @@
// Wrap an async function so it also supports node-style trailing callbacks.
// `this` is forwarded to the wrapped function.
export function callbackify(fn) {
  return function (...args) {
    const last = args[args.length - 1]
    // A trailing function argument is treated as the callback.
    if (typeof last === 'function') {
      return fn.apply(this, args.slice(0, -1)).then(
        (result) => last(null, result),
        (err) => last(err),
      )
    }
    // No callback supplied: behave as the plain promise-returning function.
    return fn.apply(this, args)
  }
}
File diff suppressed because it is too large Load Diff
+30
View File
@@ -0,0 +1,30 @@
/**
 * Accumulates conditional constraints (modified-since / unmodified-since /
 * ETag match / ETag mismatch) for conditional copy operations.
 */
export class CopyConditions {
  public modified = ''
  public unmodified = ''
  public matchETag = ''
  public matchETagExcept = ''

  // Validate and format a Date as an HTTP UTC date string.
  private static asUtcString(date: Date): string {
    if (!(date instanceof Date)) {
      throw new TypeError('date must be of type Date')
    }
    return date.toUTCString()
  }

  setModified(date: Date): void {
    this.modified = CopyConditions.asUtcString(date)
  }

  setUnmodified(date: Date): void {
    this.unmodified = CopyConditions.asUtcString(date)
  }

  setMatchETag(etag: string): void {
    this.matchETag = etag
  }

  setMatchETagExcept(etag: string): void {
    this.matchETagExcept = etag
  }
}
+136
View File
@@ -0,0 +1,136 @@
/*
* MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2020 MinIO, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as stream from 'node:stream'
import * as errors from '../errors.ts'
import type { TypedClient } from './client.ts'
import { isBoolean, isString, isValidBucketName, isValidPrefix, uriEscape } from './helper.ts'
import { readAsString } from './response.ts'
import type { BucketItemWithMetadata, BucketStream } from './type.ts'
import { parseListObjectsV2WithMetadata } from './xml-parser.ts'
/**
 * MinIO-specific S3 API extensions, issued through the given request client.
 */
export class Extensions {
  constructor(private readonly client: TypedClient) {}
  /**
   * List the objects in the bucket using S3 ListObjects V2 With Metadata
   *
   * @param bucketName - name of the bucket
   * @param prefix - the prefix of the objects that should be listed (optional, default `''`)
   * @param recursive - `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. (optional, default `false`)
   * @param startAfter - Specifies the key to start after when listing objects in a bucket. (optional, default `''`)
   * @returns stream emitting the objects in the bucket, the object is of the format:
   */
  public listObjectsV2WithMetadata(
    bucketName: string,
    prefix?: string,
    recursive?: boolean,
    startAfter?: string,
  ): BucketStream<BucketItemWithMetadata> {
    // Normalize optional arguments before validating them.
    if (prefix === undefined) {
      prefix = ''
    }
    if (recursive === undefined) {
      recursive = false
    }
    if (startAfter === undefined) {
      startAfter = ''
    }
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isValidPrefix(prefix)) {
      throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`)
    }
    if (!isString(prefix)) {
      throw new TypeError('prefix should be of type "string"')
    }
    if (!isBoolean(recursive)) {
      throw new TypeError('recursive should be of type "boolean"')
    }
    if (!isString(startAfter)) {
      throw new TypeError('startAfter should be of type "string"')
    }
    // if recursive is false set delimiter to '/'
    const delimiter = recursive ? '' : '/'
    // Expose the async generator as an object-mode Readable stream.
    return stream.Readable.from(this.listObjectsV2WithMetadataGen(bucketName, prefix, delimiter, startAfter), {
      objectMode: true,
    })
  }
  /**
   * Page through the listing, following continuation tokens until the
   * result is no longer truncated, and yield each object.
   */
  private async *listObjectsV2WithMetadataGen(
    bucketName: string,
    prefix: string,
    delimiter: string,
    startAfter: string,
  ): AsyncIterable<BucketItemWithMetadata> {
    let ended = false
    let continuationToken = ''
    do {
      const result = await this.listObjectsV2WithMetadataQuery(
        bucketName,
        prefix,
        continuationToken,
        delimiter,
        startAfter,
      )
      ended = !result.isTruncated
      continuationToken = result.nextContinuationToken
      for (const obj of result.objects) {
        yield obj
      }
    } while (!ended)
  }
  /**
   * Issue one ListObjectsV2 request (with metadata=true) and return the
   * parsed page of results.
   */
  private async listObjectsV2WithMetadataQuery(
    bucketName: string,
    prefix: string,
    continuationToken: string,
    delimiter: string,
    startAfter: string,
  ) {
    const queries = []
    // Call for listing objects v2 API
    queries.push(`list-type=2`)
    queries.push(`encoding-type=url`)
    // escape every value in query string, except maxKeys
    queries.push(`prefix=${uriEscape(prefix)}`)
    queries.push(`delimiter=${uriEscape(delimiter)}`)
    queries.push(`metadata=true`)
    if (continuationToken) {
      continuationToken = uriEscape(continuationToken)
      queries.push(`continuation-token=${continuationToken}`)
    }
    // Set start-after
    if (startAfter) {
      startAfter = uriEscape(startAfter)
      queries.push(`start-after=${startAfter}`)
    }
    queries.push(`max-keys=1000`)
    queries.sort()
    let query = ''
    if (queries.length > 0) {
      query = `${queries.join('&')}`
    }
    const method = 'GET'
    const res = await this.client.makeRequestAsync({ method, bucketName, query })
    return parseListObjectsV2WithMetadata(await readAsString(res))
  }
}
+601
View File
@@ -0,0 +1,601 @@
/*
* MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as crypto from 'node:crypto'
import * as stream from 'node:stream'
import { XMLParser } from 'fast-xml-parser'
import ipaddr from 'ipaddr.js'
import _ from 'lodash'
import * as mime from 'mime-types'
import { fsp, fstat } from './async.ts'
import type { Binary, Encryption, ObjectMetaData, RequestHeaders, ResponseHeader } from './type.ts'
import { ENCRYPTION_TYPES } from './type.ts'
const MetaDataHeaderPrefix = 'x-amz-meta-'
/**
 * Compute the MD5 (base64) and, optionally, the SHA256 (hex) digest of a
 * buffer. When `enableSHA256` is false the sha256sum is an empty string.
 */
export function hashBinary(buf: Buffer, enableSHA256: boolean) {
  const md5sum = crypto.createHash('md5').update(buf).digest('base64')
  const sha256sum = enableSHA256 ? crypto.createHash('sha256').update(buf).digest('hex') : ''
  return { md5sum, sha256sum }
}
// S3 percent-encodes a few characters that encodeURIComponent leaves alone
// (! ' ( ) *), so encode those manually as uppercase %XX sequences.
function encodeAsHex(c: string): string {
  return '%' + c.charCodeAt(0).toString(16).toUpperCase()
}
/**
 * Percent-encode a string the way S3 expects (stricter than RFC 3986's
 * encodeURIComponent).
 */
export function uriEscape(uriStr: string): string {
  return encodeURIComponent(uriStr).replace(/[!'()*]/g, (ch) => encodeAsHex(ch))
}
/**
 * Like uriEscape, but keeps literal '/' separators (used for object keys
 * that appear in the request path).
 */
export function uriResourceEscape(string: string) {
  return uriEscape(string).split('%2F').join('/')
}
/**
 * Build the AWS SigV4 credential scope string:
 * "<yyyymmdd>/<region>/<service>/aws4_request".
 */
export function getScope(region: string, date: Date, serviceName = 's3') {
  return [makeDateShort(date), region, serviceName, 'aws4_request'].join('/')
}
/**
 * isAmazonEndpoint - true only for the two AWS S3 root endpoints:
 * 's3.amazonaws.com' and the China region 's3.cn-north-1.amazonaws.com.cn'.
 */
export function isAmazonEndpoint(endpoint: string) {
  const amazonEndpoints = ['s3.amazonaws.com', 's3.cn-north-1.amazonaws.com.cn']
  return amazonEndpoints.includes(endpoint)
}
/**
 * Decide whether requests may use virtual-host-style addressing
 * (bucket.host) instead of path-style (host/bucket).
 *
 * Buckets whose names contain periods must always use path style over
 * 'https:' because the dots break single-level SSL wildcard certificates.
 * Otherwise, Amazon endpoints (and any caller not forcing path style)
 * default to virtual-host style.
 */
export function isVirtualHostStyle(endpoint: string, protocol: string, bucket: string, pathStyle: boolean) {
  const dottedBucketOverTls = protocol === 'https:' && bucket.includes('.')
  if (dottedBucketOverTls) {
    return false
  }
  return isAmazonEndpoint(endpoint) || !pathStyle
}
// Validity check delegated to the ipaddr.js library (covers IPv4 and IPv6).
export function isValidIP(ip: string) {
  return ipaddr.isValid(ip)
}
/**
 * @returns if endpoint is valid domain.
 * An endpoint is acceptable when it is either a syntactically plausible
 * domain name or a literal IP address.
 */
export function isValidEndpoint(endpoint: string) {
  return isValidDomain(endpoint) || isValidIP(endpoint)
}
/**
 * @returns if input host is a syntactically plausible domain name.
 *
 * Checks length limits (RFC 1035 / RFC 3696), forbidden edge characters,
 * and a non-exhaustive set of characters that can never appear in a host
 * name. Anything that slips through is allowed here and fails later at
 * request time.
 */
export function isValidDomain(host: string) {
  if (!isString(host)) {
    return false
  }
  // Total length must be between 1 and 255 characters.
  if (host.length === 0 || host.length > 255) {
    return false
  }
  // '-' and '_' may not appear at either end; a leading '.' is also invalid.
  const first = host[0]
  const last = host[host.length - 1]
  if (first === '-' || first === '_' || first === '.') {
    return false
  }
  if (last === '-' || last === '_') {
    return false
  }
  // Reject known-invalid punctuation anywhere in the name.
  const forbidden = '`~!@#$%^&*()+={}[]|\\"\';:><?/'
  for (const char of forbidden) {
    if (host.includes(char)) {
      return false
    }
  }
  return true
}
/**
 * Probes contentType using file extensions.
 *
 * Falls back to the generic binary type when the extension is unknown.
 *
 * @example
 * ```
 * // return 'image/png'
 * probeContentType('file.png')
 * ```
 */
export function probeContentType(path: string) {
  // mime.lookup returns `false` for unknown extensions.
  let contentType = mime.lookup(path)
  if (!contentType) {
    contentType = 'application/octet-stream'
  }
  return contentType
}
/**
* is input port valid.
*/
export function isValidPort(port: unknown): port is number {
// Convert string port to number if needed
const portNum = typeof port === 'string' ? parseInt(port, 10) : port
// verify if port is a valid number
if (!isNumber(portNum) || isNaN(portNum)) {
return false
}
// port `0` is valid and special case
return 0 <= portNum && portNum <= 65535
}
/**
 * Validate an S3 bucket name: 3-63 characters, lowercase alphanumerics with
 * '.'/'-' in the middle, no consecutive periods, and nothing resembling an
 * IPv4 address.
 */
export function isValidBucketName(bucket: unknown) {
  if (!isString(bucket)) {
    return false
  }
  // Length must be between 3 and 63 characters.
  if (bucket.length < 3 || bucket.length > 63) {
    return false
  }
  // Successive periods are never allowed.
  if (bucket.includes('..')) {
    return false
  }
  // Names containing an IP-address-like sequence are rejected.
  if (/[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+/.test(bucket)) {
    return false
  }
  // Must begin and end with a lowercase letter/digit, with letters, digits,
  // '.' and '-' in between.
  return /^[a-z0-9][a-z0-9.-]+[a-z0-9]$/.test(bucket)
}
/**
 * check if objectName is a valid object name:
 * a valid prefix (string of <= 1024 chars) that is non-empty.
 */
export function isValidObjectName(objectName: unknown) {
  return isValidPrefix(objectName) && objectName.length !== 0
}
/**
 * check if prefix is valid: a string of at most 1024 characters
 * (the empty string is allowed).
 */
export function isValidPrefix(prefix: unknown): prefix is string {
  return isString(prefix) && prefix.length <= 1024
}
/**
 * check if typeof arg number
 */
export function isNumber(arg: unknown): arg is number {
  return typeof arg === 'number'
}
// Any callable; target type of the isFunction guard below.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export type AnyFunction = (...args: any[]) => any
/**
 * check if typeof arg function
 */
export function isFunction(arg: unknown): arg is AnyFunction {
  return typeof arg === 'function'
}
/**
 * check if typeof arg string
 */
export function isString(arg: unknown): arg is string {
  return typeof arg === 'string'
}
/**
 * check if typeof arg object (excluding `null`, which is also typeof 'object')
 */
export function isObject(arg: unknown): arg is object {
  return typeof arg === 'object' && arg !== null
}
/**
 * check if object is readable stream
 * (duck-typed: any object exposing a callable `_read` qualifies)
 */
export function isReadableStream(arg: unknown): arg is stream.Readable {
  // eslint-disable-next-line @typescript-eslint/unbound-method
  return isObject(arg) && isFunction((arg as stream.Readable)._read)
}
/**
 * check if arg is boolean
 */
export function isBoolean(arg: unknown): arg is boolean {
  return typeof arg === 'boolean'
}
// Thin wrapper over lodash's isEmpty; note lodash also treats numbers and
// booleans as "empty". The narrowed type reflects how callers use it here.
export function isEmpty(o: unknown): o is null | undefined {
  return _.isEmpty(o)
}
// NOTE(review): despite its name this returns `true` when the object HAS at
// least one non-undefined value (i.e. it answers "is NOT empty"). Callers
// appear to depend on this inverted semantic, so the behavior is documented
// rather than changed — confirm before reusing elsewhere.
export function isEmptyObject(o: Record<string, unknown>): boolean {
  return Object.values(o).filter((x) => x !== undefined).length !== 0
}
// Narrow away `null`/`undefined`.
export function isDefined<T>(o: T): o is Exclude<T, null | undefined> {
  return o !== null && o !== undefined
}
/**
 * check if arg is a valid date — i.e. a Date instance whose timestamp is not
 * NaN (as produced by e.g. `new Date('garbage')`).
 */
export function isValidDate(arg: unknown): arg is Date {
  // getTime() is NaN exactly for "Invalid Date" instances; this replaces the
  // previous `isNaN(arg)` which relied on implicit Date-to-number coercion
  // and needed a @ts-expect-error suppression.
  return arg instanceof Date && !Number.isNaN(arg.getTime())
}
/**
 * Create a Date string with format: 'YYYYMMDDTHHmmss' + Z
 * (defaults to "now" when no date is given).
 */
export function makeDateLong(date?: Date): string {
  const iso = (date ?? new Date()).toISOString()
  // '2017-08-07T16:28:59.889Z' -> '20170807T162859Z'
  return iso.replace(/[-:]/g, '').replace(/\.\d{3}/, '')
}
/**
 * Create a Date string with format: 'YYYYMMDD'
 * (defaults to "now" when no date is given).
 */
export function makeDateShort(date?: Date) {
  const iso = (date ?? new Date()).toISOString()
  // '2017-08-07T16:28:59.889Z' -> '20170807'
  return iso.slice(0, 10).replace(/-/g, '')
}
/**
 * pipesetup sets up pipe() from left to right os streams array
 * pipesetup will also make sure that error emitted at any of the upstream Stream
 * will be emitted at the last stream. This makes error handling simple
 *
 * @returns the final (right-most) stream of the chain
 */
export function pipesetup(...streams: [stream.Readable, ...stream.Duplex[], stream.Writable]) {
  // Pair each stream with its successor: forward errors downstream, then pipe.
  // @ts-expect-error ts can't narrow this
  return streams.reduce((src: stream.Readable, dst: stream.Writable) => {
    src.on('error', (err) => dst.emit('error', err))
    return src.pipe(dst)
  })
}
/**
 * return a Readable stream that emits `data` once, followed by end-of-stream.
 */
export function readableStream(data: unknown): stream.Readable {
  const readable = new stream.Readable({
    // no-op: the whole payload is pushed up front below
    read() {},
  })
  readable.push(data)
  readable.push(null)
  return readable
}
/**
 * Process metadata to insert appropriate value to `content-type` attribute.
 * An explicitly provided content-type (any key casing) is respected;
 * otherwise one is inferred from the file extension.
 */
export function insertContentType(metaData: ObjectMetaData, filePath: string): ObjectMetaData {
  const hasContentType = Object.keys(metaData).some((key) => key.toLowerCase() === 'content-type')
  if (hasContentType) {
    return metaData
  }
  return { ...metaData, 'content-type': probeContentType(filePath) }
}
/**
 * Function prepends metadata with the appropriate prefix if it is not already on
 *
 * Keys that are already recognised headers (x-amz-*, supported standard
 * headers, storage class) pass through untouched; everything else gets the
 * `x-amz-meta-` user-metadata prefix.
 */
export function prependXAMZMeta(metaData?: ObjectMetaData): RequestHeaders {
  if (!metaData) {
    return {}
  }
  return _.mapKeys(metaData, (value, key) => {
    if (isAmzHeader(key) || isSupportedHeader(key) || isStorageClassHeader(key)) {
      return key
    }
    return MetaDataHeaderPrefix + key
  })
}
/**
 * Checks if it is a valid header according to the AmazonS3 API:
 * user metadata (x-amz-meta-*), the canned ACL header, or the SSE headers.
 * Comparison is case-insensitive.
 */
export function isAmzHeader(key: string) {
  const lower = key.toLowerCase()
  if (lower.startsWith(MetaDataHeaderPrefix)) {
    return true
  }
  if (lower === 'x-amz-acl' || lower === 'x-amz-server-side-encryption') {
    return true
  }
  return lower.startsWith('x-amz-server-side-encryption-')
}
// Standard HTTP/S3 headers that may be passed through verbatim.
const SUPPORTED_HEADERS = new Set([
  'content-type',
  'cache-control',
  'content-encoding',
  'content-disposition',
  'content-language',
  'x-amz-website-redirect-location',
  'if-none-match',
  'if-match',
])
/**
 * Checks if it is a supported Header (case-insensitive).
 */
export function isSupportedHeader(key: string) {
  return SUPPORTED_HEADERS.has(key.toLowerCase())
}
/**
 * Checks if it is the storage class header (case-insensitive).
 */
export function isStorageClassHeader(key: string) {
  return key.toLowerCase() === 'x-amz-storage-class'
}
// Pick the S3-relevant response headers and strip the `x-amz-meta-` prefix
// from user-metadata keys; other recognised headers keep their original key.
export function extractMetadata(headers: ResponseHeader) {
  return _.mapKeys(
    _.pickBy(headers, (value, key) => isSupportedHeader(key) || isStorageClassHeader(key) || isAmzHeader(key)),
    (value, key) => {
      const lower = key.toLowerCase()
      if (lower.startsWith(MetaDataHeaderPrefix)) {
        return lower.slice(MetaDataHeaderPrefix.length)
      }
      return key
    },
  )
}
// Version id assigned by the server, or null when absent/empty.
export function getVersionId(headers: ResponseHeader = {}) {
  return headers['x-amz-version-id'] || null
}
// Version id of the copy source object, or null when absent/empty.
export function getSourceVersionId(headers: ResponseHeader = {}) {
  return headers['x-amz-copy-source-version-id'] || null
}
/**
 * Strip a single leading and/or trailing double quote — raw (") or
 * HTML-escaped (&quot; / &#34;) — from an ETag value.
 */
export function sanitizeETag(etag = ''): string {
  // Every matched wrapper maps to removal, so a plain empty replacement is
  // equivalent to the old per-match lookup table.
  return etag.replace(/^("|&quot;|&#34;)|("|&quot;|&#34;)$/g, '')
}
/**
 * MD5 digest of a payload, base64-encoded.
 * The payload is normalized to a Buffer first: browsers hand us strings,
 * node hands us Buffers (browser support tested only against minio server).
 */
export function toMd5(payload: Binary): string {
  const md5 = crypto.createHash('md5')
  md5.update(Buffer.from(payload))
  return md5.digest().toString('base64')
}
/**
 * SHA256 digest of a payload, hex-encoded.
 */
export function toSha256(payload: Binary): string {
  const sha256 = crypto.createHash('sha256')
  sha256.update(payload)
  return sha256.digest('hex')
}
/**
 * toArray normalizes a value to an array: scalars are wrapped in a
 * single-element array, arrays are passed through untouched (same reference).
 */
export function toArray<T = unknown>(param: T | T[]): Array<T> {
  return Array.isArray(param) ? param : ([param] as T[])
}
/**
 * Decode a URL-encoded object key. '+' is the form-urlencoded escape for a
 * space and is not handled by decodeURIComponent, so pluses are swapped for
 * spaces before decoding.
 */
export function sanitizeObjectKey(objectName: string): string {
  const plusDecoded = (objectName ? objectName.toString() : '').split('+').join(' ')
  return decodeURIComponent(plusDecoded)
}
/**
 * Parse a decimal size header value (e.g. content-length) to a number.
 * @returns the parsed size, or `undefined` for a missing/empty value.
 */
export function sanitizeSize(size?: string): number | undefined {
  // Explicit radix: without it, strings like '0x10' would parse as hex.
  return size ? Number.parseInt(size, 10) : undefined
}
// Multipart upload size limits (S3 protocol constraints).
export const PART_CONSTRAINTS = {
  // absMinPartSize - absolute minimum part size (5 MiB)
  ABS_MIN_PART_SIZE: 1024 * 1024 * 5,
  // MIN_PART_SIZE - preferred minimum part size (16 MiB) when splitting objects
  MIN_PART_SIZE: 1024 * 1024 * 16,
  // MAX_PARTS_COUNT - maximum number of parts for a single multipart session.
  MAX_PARTS_COUNT: 10000,
  // MAX_PART_SIZE - maximum part size 5GiB for a single multipart upload
  // operation.
  MAX_PART_SIZE: 1024 * 1024 * 1024 * 5,
  // MAX_SINGLE_PUT_OBJECT_SIZE - maximum size 5GiB of object per PUT
  // operation.
  MAX_SINGLE_PUT_OBJECT_SIZE: 1024 * 1024 * 1024 * 5,
  // MAX_MULTIPART_PUT_OBJECT_SIZE - maximum size 5TiB of object for
  // Multipart operation.
  MAX_MULTIPART_PUT_OBJECT_SIZE: 1024 * 1024 * 1024 * 1024 * 5,
}
const GENERIC_SSE_HEADER = 'X-Amz-Server-Side-Encryption'
// Header names used when building SSE request headers below.
const ENCRYPTION_HEADERS = {
  // sseGenericHeader is the AWS SSE header used for SSE-S3 and SSE-KMS.
  sseGenericHeader: GENERIC_SSE_HEADER,
  // sseKmsKeyID is the AWS SSE-KMS key id.
  sseKmsKeyID: GENERIC_SSE_HEADER + '-Aws-Kms-Key-Id',
} as const
/**
 * Return Encryption headers
 * @param encConfig
 * @returns an object with key value pairs that can be used in headers.
 * Returns an empty object when no encryption type is configured.
 */
export function getEncryptionHeaders(encConfig: Encryption): RequestHeaders {
  const encType = encConfig.type
  if (!isEmpty(encType)) {
    if (encType === ENCRYPTION_TYPES.SSEC) {
      // NOTE(review): this sends the generic SSE header with 'AES256' for the
      // SSE-C case. SSE-C normally uses the x-amz-*-customer-* header family —
      // verify against server expectations before changing.
      return {
        [ENCRYPTION_HEADERS.sseGenericHeader]: 'AES256',
      }
    } else if (encType === ENCRYPTION_TYPES.KMS) {
      return {
        [ENCRYPTION_HEADERS.sseGenericHeader]: encConfig.SSEAlgorithm,
        [ENCRYPTION_HEADERS.sseKmsKeyID]: encConfig.KMSMasterKeyID,
      }
    }
  }
  return {}
}
/**
 * Number of multipart parts needed for `size` bytes, assuming the largest
 * per-part size that still fits the whole 5 TiB maximum object under the
 * part-count limit.
 */
export function partsRequired(size: number): number {
  const maxPartSize = PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE / (PART_CONSTRAINTS.MAX_PARTS_COUNT - 1)
  // Integer division, rounded up when there is a remainder.
  let parts = size / maxPartSize
  if (size % maxPartSize > 0) {
    parts++
  }
  return Math.trunc(parts)
}
/**
 * calculateEvenSplits - computes splits for a source and returns
 * start and end index slices. Splits happen evenly to be sure that no
 * part is less than 5MiB, as that could fail the multipart request if
 * it is not the last part.
 *
 * @returns null for an empty (size 0) source; otherwise parallel arrays of
 * inclusive start/end byte offsets, plus the objInfo passed through.
 */
export function calculateEvenSplits<T extends { Start?: number }>(
  size: number,
  objInfo: T,
): {
  startIndex: number[]
  objInfo: T
  endIndex: number[]
} | null {
  if (size === 0) {
    return null
  }
  const reqParts = partsRequired(size)
  const startIndexParts: number[] = []
  const endIndexParts: number[] = []
  // Begin at objInfo.Start when given; -1 (or absent) means "from byte 0".
  let start = objInfo.Start
  if (isEmpty(start) || start === -1) {
    start = 0
  }
  // Each part gets floor(size/reqParts) bytes; the first (size % reqParts)
  // parts take one extra byte so the whole range is covered exactly.
  const divisorValue = Math.trunc(size / reqParts)
  const reminderValue = size % reqParts
  let nextStart = start
  for (let i = 0; i < reqParts; i++) {
    let curPartSize = divisorValue
    if (i < reminderValue) {
      curPartSize++
    }
    const currentStart = nextStart
    const currentEnd = currentStart + curPartSize - 1
    nextStart = currentEnd + 1
    startIndexParts.push(currentStart)
    endIndexParts.push(currentEnd)
  }
  return { startIndex: startIndexParts, endIndex: endIndexParts, objInfo: objInfo }
}
// Shared parser instance; eNotation is disabled so long numeric strings are
// not rendered in exponent form by the number parser.
const fxp = new XMLParser({ numberParseOptions: { eNotation: false, hex: true, leadingZeros: true } })
// Parse an XML document; a top-level <Error> element in the payload is
// thrown instead of returned.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export function parseXml(xml: string): any {
  const result = fxp.parse(xml)
  if (result.Error) {
    throw result.Error
  }
  return result
}
/**
 * get content size of object content to upload
 *
 * @returns the byte length, or null when it cannot be determined without
 * consuming the stream.
 */
export async function getContentLength(s: stream.Readable | Buffer | string): Promise<number | null> {
  // use length property of string | Buffer
  if (typeof s === 'string' || Buffer.isBuffer(s)) {
    return s.length
  }
  // property of `fs.ReadStream` — stat the backing file by path.
  const filePath = (s as unknown as Record<string, unknown>).path as string | undefined
  if (filePath && typeof filePath === 'string') {
    const stat = await fsp.lstat(filePath)
    return stat.size
  }
  // property of `fs.ReadStream` — stat by open file descriptor.
  // NOTE(review): the truthiness check skips fd 0 (stdin) — confirm intended.
  const fd = (s as unknown as Record<string, unknown>).fd as number | null | undefined
  if (fd && typeof fd === 'number') {
    const stat = await fstat(fd)
    return stat.size
  }
  return null
}
+23
View File
@@ -0,0 +1,23 @@
/**
 * joinHostPort combines host and port into a network address of the
 * form "host:port". If host contains a colon, as found in literal
 * IPv6 addresses, the result is "[host]:port".
 *
 * @param host
 * @param port - omitted port yields the bare host
 * @returns Cleaned up host
 * @internal
 */
export function joinHostPort(host: string, port?: number): string {
  if (port === undefined) {
    return host
  }
  // Hosts containing ':' are assumed to be literal IPv6 addresses and must
  // be bracketed so the port separator stays unambiguous.
  const hostPart = host.includes(':') ? `[${host}]` : host
  return `${hostPart}:${port}`
}
+99
View File
@@ -0,0 +1,99 @@
// Build PostPolicy object that can be signed by presignedPostPolicy
import * as errors from '../errors.ts'
import { isObject, isValidBucketName, isValidObjectName, isValidPrefix } from './helper.ts'
import type { ObjectMetaData } from './type.ts'
/**
 * Builder for an S3 POST policy document, signed later by
 * presignedPostPolicy. Each setter records both the policy condition and the
 * matching form field the browser must submit.
 */
export class PostPolicy {
  // Raw policy document: a list of conditions plus an optional expiration.
  public policy: { conditions: (string | number)[][]; expiration?: string } = {
    conditions: [],
  }
  // Form fields to be submitted alongside the signed policy.
  public formData: Record<string, string> = {}

  // set expiration date
  setExpires(date: Date) {
    if (!date) {
      throw new errors.InvalidDateError('Invalid date: cannot be null')
    }
    this.policy.expiration = date.toISOString()
  }

  // set object name
  setKey(objectName: string) {
    if (!isValidObjectName(objectName)) {
      throw new errors.InvalidObjectNameError(`Invalid object name : ${objectName}`)
    }
    this.policy.conditions.push(['eq', '$key', objectName])
    this.formData.key = objectName
  }

  // set object name prefix, i.e policy allows any keys with this prefix
  setKeyStartsWith(prefix: string) {
    if (!isValidPrefix(prefix)) {
      throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`)
    }
    this.policy.conditions.push(['starts-with', '$key', prefix])
    this.formData.key = prefix
  }

  // set bucket name
  setBucket(bucketName: string) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError(`Invalid bucket name : ${bucketName}`)
    }
    this.policy.conditions.push(['eq', '$bucket', bucketName])
    this.formData.bucket = bucketName
  }

  // set Content-Type
  setContentType(type: string) {
    if (!type) {
      throw new Error('content-type cannot be null')
    }
    this.policy.conditions.push(['eq', '$Content-Type', type])
    this.formData['Content-Type'] = type
  }

  // set Content-Type prefix, i.e image/ allows any image
  setContentTypeStartsWith(prefix: string) {
    if (!prefix) {
      throw new Error('content-type cannot be null')
    }
    this.policy.conditions.push(['starts-with', '$Content-Type', prefix])
    this.formData['Content-Type'] = prefix
  }

  // set Content-Disposition
  setContentDisposition(value: string) {
    if (!value) {
      throw new Error('content-disposition cannot be null')
    }
    this.policy.conditions.push(['eq', '$Content-Disposition', value])
    this.formData['Content-Disposition'] = value
  }

  // set minimum/maximum length of what Content-Length can be.
  setContentLengthRange(min: number, max: number) {
    if (min > max) {
      throw new Error('min cannot be more than max')
    }
    // Bug fix: the messages previously said "> 0" although the checks only
    // reject negative values — zero is and was accepted.
    if (min < 0) {
      throw new Error('min should be >= 0')
    }
    if (max < 0) {
      throw new Error('max should be >= 0')
    }
    this.policy.conditions.push(['content-length-range', min, max])
  }

  // set user defined metadata
  setUserMetaData(metaData: ObjectMetaData) {
    if (!isObject(metaData)) {
      throw new TypeError('metadata should be of type "object"')
    }
    Object.entries(metaData).forEach(([key, value]) => {
      const amzMetaDataKey = `x-amz-meta-${key}`
      this.policy.conditions.push(['eq', `$${amzMetaDataKey}`, value])
      this.formData[amzMetaDataKey] = value.toString()
    })
  }
}
+100
View File
@@ -0,0 +1,100 @@
import type * as http from 'node:http'
import type * as https from 'node:https'
import type * as stream from 'node:stream'
import { pipeline } from 'node:stream'
import { promisify } from 'node:util'
import type { Transport } from './type.ts'
// Promisified stream.pipeline, used for streaming request bodies.
const pipelineAsync = promisify(pipeline)
/**
 * Perform a single HTTP(S) request and resolve with the raw response object
 * as soon as it is available (the response body is NOT consumed here).
 *
 * @param transport - http or https module (anything exposing `request`)
 * @param opt - standard node request options
 * @param body - optional payload; streams are piped, Buffers/strings are
 * written directly with end()
 */
export async function request(
  transport: Transport,
  opt: https.RequestOptions,
  body: Buffer | string | stream.Readable | null = null,
): Promise<http.IncomingMessage> {
  return new Promise<http.IncomingMessage>((resolve, reject) => {
    const requestObj = transport.request(opt, (response) => {
      resolve(response)
    })
    requestObj.on('error', reject)
    if (!body || Buffer.isBuffer(body) || typeof body === 'string') {
      // Small or absent payloads are flushed in one call.
      requestObj.end(body)
    } else {
      // Stream payloads are piped; pipeline failures reject the promise.
      pipelineAsync(body, requestObj).catch(reject)
    }
  })
}
// Default retry budget for requestWithRetry.
const MAX_RETRIES = 10
const EXP_BACK_OFF_BASE_DELAY = 1000 // Base delay for exponential backoff
const ADDITIONAL_DELAY_FACTOR = 1.0 // to avoid synchronized retries
// Retryable error codes for HTTP ( ref: minio-go)
export const retryHttpCodes: Record<string, boolean> = {
  408: true,
  429: true,
  499: true,
  500: true,
  502: true,
  503: true,
  504: true,
  520: true,
}
// Membership test against the retryHttpCodes table above.
const isHttpRetryable = (httpResCode: number) => {
  return retryHttpCodes[httpResCode] !== undefined
}
// Promise-based delay helper.
const sleep = (ms: number) => {
  return new Promise((resolve) => setTimeout(resolve, ms))
}
// Exponential backoff (base * 2^retryCount) plus proportional random jitter.
const getExpBackOffDelay = (retryCount: number) => {
  const backOffBy = EXP_BACK_OFF_BASE_DELAY * 2 ** retryCount
  const additionalDelay = Math.random() * backOffBy * ADDITIONAL_DELAY_FACTOR
  return backOffBy + additionalDelay
}
/**
 * Issue `request` with retries on transient failures (HTTP status codes in
 * `retryHttpCodes`), using exponential backoff with jitter between attempts.
 *
 * NOTE: a stream body cannot be safely replayed — when retries matter,
 * callers should pass a Buffer or string body.
 *
 * @param transport - http or https module
 * @param opt - node request options
 * @param body - optional request payload
 * @param maxRetries - retry budget (default MAX_RETRIES)
 * @throws Error when the budget is exhausted or on a non-retryable failure
 */
export async function requestWithRetry(
  transport: Transport,
  opt: https.RequestOptions,
  body: Buffer | string | stream.Readable | null = null,
  maxRetries: number = MAX_RETRIES,
): Promise<http.IncomingMessage> {
  let attempt = 0
  let isRetryable = false
  while (attempt <= maxRetries) {
    try {
      const response = await request(transport, opt, body)
      // Check if the HTTP status code is retryable
      if (isHttpRetryable(response.statusCode as number)) {
        // Drain the discarded response so its socket is released back to
        // the agent instead of leaking until timeout.
        response.resume()
        isRetryable = true
        throw new Error(`Retryable HTTP status: ${response.statusCode}`) // trigger retry attempt with calculated delay
      }
      return response // Success, return the raw response
    } catch (err) {
      if (isRetryable) {
        attempt++
        isRetryable = false
        if (attempt > maxRetries) {
          throw new Error(`Request failed after ${maxRetries} retries: ${err}`)
        }
        const delay = getExpBackOffDelay(attempt)
        // eslint-disable-next-line no-console
        console.warn(
          `${new Date().toLocaleString()} Retrying request (attempt ${attempt}/${maxRetries}) after ${delay}ms due to: ${err}`,
        )
        await sleep(delay)
      } else {
        throw err // re-throw if any request, syntax errors
      }
    }
  }
  // Bug fix: report the effective retry budget, not the MAX_RETRIES constant.
  throw new Error(`${maxRetries} Retries exhausted, request failed.`)
}
+26
View File
@@ -0,0 +1,26 @@
import type http from 'node:http'
import type stream from 'node:stream'
/**
 * Buffer an entire readable stream into a single Buffer.
 */
export async function readAsBuffer(res: stream.Readable): Promise<Buffer> {
  return new Promise((resolve, reject) => {
    const chunks: Buffer[] = []
    res.on('data', (chunk: Buffer) => {
      chunks.push(chunk)
    })
    res.on('error', (e) => reject(e))
    res.on('end', () => resolve(Buffer.concat(chunks)))
  })
}
/**
 * Read an HTTP response body fully and decode it as a UTF-8 string.
 */
export async function readAsString(res: http.IncomingMessage): Promise<string> {
  const buf = await readAsBuffer(res)
  return buf.toString()
}
/**
 * Consume and discard a response body so the underlying socket can be reused.
 */
export async function drainResponse(res: stream.Readable): Promise<void> {
  return new Promise((resolve, reject) => {
    res.on('data', () => {})
    res.on('error', (e) => reject(e))
    res.on('end', () => resolve())
  })
}
+70
View File
@@ -0,0 +1,70 @@
/*
* MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015, 2016 MinIO, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { isString } from './helper.ts'
// List of currently supported endpoints: AWS region name -> regional S3 host.
const awsS3Endpoint = {
  'af-south-1': 's3.af-south-1.amazonaws.com',
  'ap-east-1': 's3.ap-east-1.amazonaws.com',
  'ap-south-1': 's3.ap-south-1.amazonaws.com',
  'ap-south-2': 's3.ap-south-2.amazonaws.com',
  'ap-southeast-1': 's3.ap-southeast-1.amazonaws.com',
  'ap-southeast-2': 's3.ap-southeast-2.amazonaws.com',
  'ap-southeast-3': 's3.ap-southeast-3.amazonaws.com',
  'ap-southeast-4': 's3.ap-southeast-4.amazonaws.com',
  'ap-southeast-5': 's3.ap-southeast-5.amazonaws.com',
  'ap-northeast-1': 's3.ap-northeast-1.amazonaws.com',
  'ap-northeast-2': 's3.ap-northeast-2.amazonaws.com',
  'ap-northeast-3': 's3.ap-northeast-3.amazonaws.com',
  'ca-central-1': 's3.ca-central-1.amazonaws.com',
  'ca-west-1': 's3.ca-west-1.amazonaws.com',
  'cn-north-1': 's3.cn-north-1.amazonaws.com.cn',
  'eu-central-1': 's3.eu-central-1.amazonaws.com',
  'eu-central-2': 's3.eu-central-2.amazonaws.com',
  'eu-north-1': 's3.eu-north-1.amazonaws.com',
  'eu-south-1': 's3.eu-south-1.amazonaws.com',
  'eu-south-2': 's3.eu-south-2.amazonaws.com',
  'eu-west-1': 's3.eu-west-1.amazonaws.com',
  'eu-west-2': 's3.eu-west-2.amazonaws.com',
  'eu-west-3': 's3.eu-west-3.amazonaws.com',
  'il-central-1': 's3.il-central-1.amazonaws.com',
  'me-central-1': 's3.me-central-1.amazonaws.com',
  'me-south-1': 's3.me-south-1.amazonaws.com',
  'sa-east-1': 's3.sa-east-1.amazonaws.com',
  'us-east-1': 's3.us-east-1.amazonaws.com',
  'us-east-2': 's3.us-east-2.amazonaws.com',
  'us-west-1': 's3.us-west-1.amazonaws.com',
  'us-west-2': 's3.us-west-2.amazonaws.com',
  'us-gov-east-1': 's3.us-gov-east-1.amazonaws.com',
  'us-gov-west-1': 's3.us-gov-west-1.amazonaws.com',
  // Add new endpoints here.
}
// Known AWS region names, plus any free-form string for non-AWS deployments.
export type Region = keyof typeof awsS3Endpoint | string
// getS3Endpoint get relevant endpoint for the region; unknown regions fall
// back to the global 's3.amazonaws.com' endpoint.
export function getS3Endpoint(region: Region): string {
  if (!isString(region)) {
    throw new TypeError(`Invalid region: ${region}`)
  }
  return (awsS3Endpoint as Record<string, string>)[region] ?? 's3.amazonaws.com'
}
+542
View File
@@ -0,0 +1,542 @@
import type * as http from 'node:http'
import type { Readable as ReadableStream } from 'node:stream'
import type { CopyDestinationOptions, CopySourceOptions } from '../helpers.ts'
import type { CopyConditions } from './copy-conditions.ts'
// Identifies a specific version of an object in a versioned bucket.
export type VersionIdentificator = {
  versionId?: string
}
// Options for GetObject, including SSE-C decryption headers.
export type GetObjectOpts = VersionIdentificator & {
  SSECustomerAlgorithm?: string
  SSECustomerKey?: string
  SSECustomerKeyMD5?: string
}
export type Binary = string | Buffer
// nodejs IncomingHttpHeaders is Record<string, string | string[]>, but it's actually this:
export type ResponseHeader = Record<string, string>
export type ObjectMetaData = Record<string, string | number>
export type RequestHeaders = Record<string, string | boolean | number | undefined>
// Server-side-encryption request configuration (SSE-C or SSE-KMS variant).
export type Encryption =
  | {
      type: ENCRYPTION_TYPES.SSEC
    }
  | {
      type: ENCRYPTION_TYPES.KMS
      SSEAlgorithm?: string
      KMSMasterKeyID?: string
    }
export type EnabledOrDisabledStatus = 'Enabled' | 'Disabled'
export enum ENCRYPTION_TYPES {
  /**
   * SSEC represents server-side-encryption with customer provided keys
   */
  SSEC = 'SSE-C',
  /**
   * KMS represents server-side-encryption with managed keys
   */
  KMS = 'KMS',
}
export enum RETENTION_MODES {
  GOVERNANCE = 'GOVERNANCE',
  COMPLIANCE = 'COMPLIANCE',
}
export enum RETENTION_VALIDITY_UNITS {
  DAYS = 'Days',
  YEARS = 'Years',
}
export enum LEGAL_HOLD_STATUS {
  ENABLED = 'ON',
  DISABLED = 'OFF',
}
// Anything that can issue node-style HTTP requests (the http/https modules).
export type Transport = Pick<typeof http, 'request'>
// Pieces of a request used to build its canonical form for signing.
export interface IRequest {
  protocol: string
  port?: number | string
  method: string
  path: string
  headers: RequestHeaders
}
export type ICanonicalRequest = string
export interface IncompleteUploadedBucketItem {
  key: string
  uploadId: string
  size: number
}
export interface MetadataItem {
  Key: string
  Value: string
}
export interface ItemBucketMetadataList {
  Items: MetadataItem[]
}
export interface ItemBucketMetadata {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  [key: string]: any
}
export interface ItemBucketTags {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  [key: string]: any
}
export interface BucketItemFromList {
  name: string
  creationDate: Date
}
export interface BucketItemCopy {
  etag: string
  lastModified: Date
}
// A listing entry: either a concrete object (name/size/etag) or a common
// prefix ("directory"); the two shapes are mutually exclusive.
export type BucketItem =
  | {
      name: string
      size: number
      etag: string
      prefix?: never
      lastModified: Date
    }
  | {
      name?: never
      etag?: never
      lastModified?: never
      prefix: string
      size: 0
    }
export type BucketItemWithMetadata = BucketItem & {
  metadata?: ItemBucketMetadata | ItemBucketMetadataList
  tags?: ItemBucketTags
}
/**
 * Object-mode readable stream with typed 'data' events.
 */
export interface BucketStream<T> extends ReadableStream {
  on(event: 'data', listener: (item: T) => void): this
  on(event: 'end' | 'pause' | 'readable' | 'resume' | 'close', listener: () => void): this
  on(event: 'error', listener: (err: Error) => void): this
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  on(event: string | symbol, listener: (...args: any[]) => void): this
}
// Result shape of statObject.
export interface BucketItemStat {
  size: number
  etag: string
  lastModified: Date
  metaData: ItemBucketMetadata
  versionId?: string | null
}
export type StatObjectOpts = {
  versionId?: string
}
/* Replication Config types */
export type ReplicationRuleStatus = {
  Status: EnabledOrDisabledStatus
}
export type Tag = {
  Key: string
  Value: string
}
export type Tags = Record<string, string>
export type ReplicationRuleDestination = {
  Bucket: string
  StorageClass: string
}
export type ReplicationRuleAnd = {
  Prefix: string
  Tags: Tag[]
}
export type ReplicationRuleFilter = {
  Prefix: string
  And: ReplicationRuleAnd
  Tag: Tag
}
export type ReplicaModifications = {
  Status: ReplicationRuleStatus
}
export type SourceSelectionCriteria = {
  ReplicaModifications: ReplicaModifications
}
export type ExistingObjectReplication = {
  Status: ReplicationRuleStatus
}
export type ReplicationRule = {
  ID: string
  Status: ReplicationRuleStatus
  Priority: number
  DeleteMarkerReplication: ReplicationRuleStatus // should be set to "Disabled" by default
  DeleteReplication: ReplicationRuleStatus
  Destination: ReplicationRuleDestination
  Filter: ReplicationRuleFilter
  SourceSelectionCriteria: SourceSelectionCriteria
  ExistingObjectReplication: ExistingObjectReplication
}
export type ReplicationConfigOpts = {
  role: string
  rules: ReplicationRule[]
}
export type ReplicationConfig = {
  ReplicationConfiguration: ReplicationConfigOpts
}
/* End of replication config types */
// Node-style callback used by the callback variants of the client APIs.
export type ResultCallback<T> = (error: Error | null, result: T) => void
export type GetObjectLegalHoldOptions = {
  versionId: string
}
/**
* @deprecated keep for backward compatible, use `LEGAL_HOLD_STATUS` instead
*/
export type LegalHoldStatus = LEGAL_HOLD_STATUS
export type PutObjectLegalHoldOptions = {
versionId?: string
status: LEGAL_HOLD_STATUS
}
export interface UploadedObjectInfo {
etag: string
versionId: string | null
}
export interface RetentionOptions {
versionId: string
mode?: RETENTION_MODES
retainUntilDate?: IsoDate
governanceBypass?: boolean
}
export type Retention = RetentionOptions | EmptyObject
export type IsoDate = string
export type EmptyObject = Record<string, never>
/** Object-lock settings for a bucket; empty object when locking is not configured. */
export type ObjectLockInfo =
  | {
      objectLockEnabled: EnabledOrDisabledStatus
      mode: RETENTION_MODES
      unit: RETENTION_VALIDITY_UNITS
      validity: number
    }
  | EmptyObject
export type ObjectLockConfigParam = {
  ObjectLockEnabled?: 'Enabled' | undefined
  Rule?:
    | {
        DefaultRetention:
          | {
              Mode: RETENTION_MODES
              Days: number
              Years: number
            }
          | EmptyObject
      }
    | EmptyObject
}
export type VersioningEnabled = 'Enabled'
export type VersioningSuspended = 'Suspended'
export type TaggingOpts = {
  versionId: string
}
/** Parameters for setting tags; omit `objectName` to tag the bucket itself. */
export type PutTaggingParams = {
  bucketName: string
  objectName?: string
  tags: Tags
  putOpts?: TaggingOpts
}
export type RemoveTaggingParams = {
  bucketName: string
  objectName?: string
  removeOpts?: TaggingOpts
}
/** Input description for selectObjectContent (S3 Select). */
export type InputSerialization = {
  CompressionType?: 'NONE' | 'GZIP' | 'BZIP2'
  CSV?: {
    AllowQuotedRecordDelimiter?: boolean
    Comments?: string
    FieldDelimiter?: string
    FileHeaderInfo?: 'NONE' | 'IGNORE' | 'USE'
    QuoteCharacter?: string
    QuoteEscapeCharacter?: string
    RecordDelimiter?: string
  }
  JSON?: {
    Type: 'DOCUMENT' | 'LINES'
  }
  Parquet?: EmptyObject
}
export type OutputSerialization = {
  CSV?: {
    FieldDelimiter?: string
    QuoteCharacter?: string
    QuoteEscapeCharacter?: string
    QuoteFields?: string
    RecordDelimiter?: string
  }
  JSON?: {
    RecordDelimiter?: string
  }
}
export type SelectProgress = { Enabled: boolean }
/** Byte range scanned by S3 Select; both bounds are byte offsets. */
export type ScanRange = { Start: number; End: number }
export type SelectOptions = {
  expression: string
  expressionType?: string
  inputSerialization: InputSerialization
  outputSerialization: OutputSerialization
  requestProgress?: SelectProgress
  scanRange?: ScanRange
}
/** Lifecycle expiration action; `Date` and `Days` are mutually exclusive per S3 semantics. */
export type Expiration = {
  Date?: string
  Days: number
  DeleteMarker?: boolean
  DeleteAll?: boolean
}
export type RuleFilterAnd = {
  Prefix: string
  Tags: Tag[]
}
export type RuleFilter = {
  And?: RuleFilterAnd
  Prefix: string
  Tag?: Tag[]
}
export type NoncurrentVersionExpiration = {
  NoncurrentDays: number
  NewerNoncurrentVersions?: number
}
export type NoncurrentVersionTransition = {
  StorageClass: string
  NoncurrentDays?: number
  NewerNoncurrentVersions?: number
}
export type Transition = {
  Date?: string
  StorageClass: string
  Days: number
}
export type AbortIncompleteMultipartUpload = {
  DaysAfterInitiation: number
}
/** One lifecycle rule; field names mirror the S3 lifecycle XML. */
export type LifecycleRule = {
  AbortIncompleteMultipartUpload?: AbortIncompleteMultipartUpload
  ID: string
  Prefix?: string
  Status?: string
  Expiration?: Expiration
  Filter?: RuleFilter
  NoncurrentVersionExpiration?: NoncurrentVersionExpiration
  NoncurrentVersionTransition?: NoncurrentVersionTransition
  Transition?: Transition
}
export type LifecycleConfig = {
  Rule: LifecycleRule[]
}
// falsy values ('' / null / undefined) mean "remove the lifecycle configuration"
export type LifeCycleConfigParam = LifecycleConfig | null | undefined | ''
export type ApplySSEByDefault = {
  KmsMasterKeyID?: string
  SSEAlgorithm: string
}
export type EncryptionRule = {
  ApplyServerSideEncryptionByDefault?: ApplySSEByDefault
}
export type EncryptionConfig = {
  Rule: EncryptionRule[]
}
export type GetObjectRetentionOpts = {
  versionId: string
}
export type ObjectRetentionInfo = {
  mode: RETENTION_MODES
  retainUntilDate: string
}
export type RemoveObjectsEntry = {
  name: string
  versionId?: string
}
export type ObjectName = string
// removeObjects accepts plain keys or {name, versionId} entries
export type RemoveObjectsParam = ObjectName[] | RemoveObjectsEntry[]
export type RemoveObjectsRequestEntry = {
  Key: string
  VersionId?: string
}
export type RemoveObjectsResponse =
  | null
  | undefined
  | {
      Error?: {
        Code?: string
        Message?: string
        Key?: string
        VersionId?: string
      }
    }
export type CopyObjectResultV1 = {
  etag: string
  lastModified: string | Date
}
export type CopyObjectResultV2 = {
  Bucket?: string
  Key?: string
  LastModified: string | Date
  MetaData?: ResponseHeader
  VersionId?: string | null
  SourceVersionId?: string | null
  Etag?: string
  Size?: number
}
export type CopyObjectResult = CopyObjectResultV1 | CopyObjectResultV2
// V1: (source, dest, conditions); V2: (CopySourceOptions, CopyDestinationOptions)
export type CopyObjectParams = [CopySourceOptions, CopyDestinationOptions] | [string, string, string, CopyConditions?]
export type ExcludedPrefix = {
  Prefix: string
}
export type BucketVersioningConfiguration = {
  Status: VersioningEnabled | VersioningSuspended
  /* Below are minio only extensions */
  MFADelete?: string
  ExcludedPrefixes?: ExcludedPrefix[]
  ExcludeFolders?: boolean
}
/** Parameters for uploading one part of a server-side multipart copy. */
export type UploadPartConfig = {
  bucketName: string
  objectName: string
  uploadID: string
  partNumber: number
  headers: RequestHeaders
  sourceObj: string
}
export type PreSignRequestParams = { [key: string]: string }
/** List object api types **/
// Common types
export type CommonPrefix = {
  Prefix: string
}
export type Owner = {
  ID: string
  DisplayName: string
}
export type Metadata = {
  Items: MetadataItem[]
}
/** One entry in a listObjects result: either an object (has `name`) or a prefix. */
export type ObjectInfo = {
  key?: string
  name?: string
  lastModified?: Date // time string of format "2006-01-02T15:04:05.000Z"
  etag?: string
  owner?: Owner
  storageClass?: string
  userMetadata?: Metadata
  userTags?: string
  prefix?: string
  size?: number
}
export type ListObjectQueryRes = {
  isTruncated?: boolean
  nextMarker?: string
  versionIdMarker?: string
  objects?: ObjectInfo[]
}
export type ListObjectQueryOpts = {
  Delimiter?: string
  MaxKeys?: number
  IncludeVersion?: boolean
}
/** List object api types **/
export type ObjectVersionEntry = {
  IsLatest?: string
  VersionId?: string
}
// raw row shape as parsed from list XML (string-typed numeric fields)
export type ObjectRowEntry = ObjectVersionEntry & {
  Key: string
  LastModified?: Date | undefined
  ETag?: string
  Size?: string
  Owner?: Owner
  StorageClass?: string
}
/** Raw parse of ListObjects / ListObjectVersions XML; all fields optional. */
export interface ListBucketResultV1 {
  Name?: string
  Prefix?: string
  ContinuationToken?: string
  KeyCount?: string
  Marker?: string
  MaxKeys?: string
  Delimiter?: string
  IsTruncated?: boolean
  Contents?: ObjectRowEntry[]
  NextKeyMarker?: string
  CommonPrefixes?: CommonPrefix[]
  Version?: ObjectRowEntry[]
  DeleteMarker?: ObjectRowEntry[]
  VersionIdMarker?: string
  NextVersionIdMarker?: string
}
+751
View File
@@ -0,0 +1,751 @@
import type * as http from 'node:http'
import type stream from 'node:stream'
import crc32 from 'buffer-crc32'
import { XMLParser } from 'fast-xml-parser'
import * as errors from '../errors.ts'
import { SelectResults } from '../helpers.ts'
import { isObject, parseXml, readableStream, sanitizeETag, sanitizeObjectKey, sanitizeSize, toArray } from './helper.ts'
import { readAsString } from './response.ts'
import type {
BucketItemFromList,
BucketItemWithMetadata,
CommonPrefix,
CopyObjectResultV1,
ListBucketResultV1,
ObjectInfo,
ObjectLockInfo,
ObjectRowEntry,
ReplicationConfig,
Tags,
} from './type.ts'
import { RETENTION_VALIDITY_UNITS } from './type.ts'
// parse XML response for bucket region
/** Extract the bucket region from a GetBucketLocation XML response. */
export function parseBucketRegion(xml: string): string {
  const xmlObj = parseXml(xml)
  return xmlObj.LocationConstraint
}
// Shared parser with default options (numeric-looking strings become numbers).
const fxp = new XMLParser()
// Variant that never coerces values to numbers (skipLike matches everything);
// used where numeric coercion would corrupt keys/ETags (e.g. listObjects).
const fxpWithoutNumParser = new XMLParser({
  // @ts-ignore
  numberParseOptions: {
    skipLike: /./,
  },
})
// Parse XML and return information as Javascript types
// parse error XML response
/**
 * Build an S3Error from an error XML body plus response-header info.
 * XML `<Error>` children become lowercased properties; header info keys
 * are copied verbatim and take precedence.
 */
export function parseError(xml: string, headerInfo: Record<string, unknown>) {
  const parsed = fxp.parse(xml)
  const xmlErr = parsed.Error || {}
  const e = new errors.S3Error() as unknown as Record<string, unknown>
  for (const [key, value] of Object.entries(xmlErr)) {
    e[key.toLowerCase()] = value
  }
  for (const [key, value] of Object.entries(headerInfo)) {
    e[key] = value
  }
  return e
}
// Generates an Error object depending on http statusCode and XML body
/**
 * Build and throw an Error for a failed HTTP response.
 * Well-known status codes map to fixed code/message pairs; otherwise the
 * minio-specific error headers are consulted. If the body contains error
 * XML, that takes precedence via parseError. Always throws.
 */
export async function parseResponseError(response: http.IncomingMessage): Promise<Record<string, string>> {
  const statusCode = response.statusCode
  const statusToError: Record<number, [string, string]> = {
    301: ['MovedPermanently', 'Moved Permanently'],
    307: ['TemporaryRedirect', 'Are you using the correct endpoint URL?'],
    403: ['AccessDenied', 'Valid and authorized credentials required'],
    404: ['NotFound', 'Not Found'],
    405: ['MethodNotAllowed', 'Method Not Allowed'],
    501: ['MethodNotAllowed', 'Method Not Allowed'],
    503: ['SlowDown', 'Please reduce your request rate.'],
  }
  let code = ''
  let message = ''
  const mapped = statusCode === undefined ? undefined : statusToError[statusCode]
  if (mapped) {
    code = mapped[0]
    message = mapped[1]
  } else {
    // fall back to minio server extension headers, when both are present
    const hErrCode = response.headers['x-minio-error-code'] as string
    const hErrDesc = response.headers['x-minio-error-desc'] as string
    if (hErrCode && hErrDesc) {
      code = hErrCode
      message = hErrDesc
    }
  }
  const headerInfo: Record<string, string | undefined | null> = {}
  // A value created by S3 compatible server that uniquely identifies the request.
  headerInfo.amzRequestid = response.headers['x-amz-request-id'] as string | undefined
  // A special token that helps troubleshoot API replies and issues.
  headerInfo.amzId2 = response.headers['x-amz-id-2'] as string | undefined
  // Region where the bucket is located. This header is returned only
  // in HEAD bucket and ListObjects response.
  headerInfo.amzBucketRegion = response.headers['x-amz-bucket-region'] as string | undefined
  const xmlString = await readAsString(response)
  if (xmlString) {
    throw parseError(xmlString, headerInfo)
  }
  // Message should be instantiated for each S3Errors.
  const e = new errors.S3Error(message, { cause: headerInfo })
  // S3 Error code.
  e.code = code
  for (const [key, value] of Object.entries(headerInfo)) {
    // @ts-expect-error force set error properties
    e[key] = value
  }
  throw e
}
/**
* parse XML response for list objects v2 with metadata in a bucket
*/
/**
 * parse XML response for list objects v2 with metadata in a bucket
 */
export function parseListObjectsV2WithMetadata(xml: string) {
  const result: {
    objects: Array<BucketItemWithMetadata>
    isTruncated: boolean
    nextContinuationToken: string
  } = {
    objects: [],
    isTruncated: false,
    nextContinuationToken: '',
  }
  let xmlobj = parseXml(xml)
  if (!xmlobj.ListBucketResult) {
    throw new errors.InvalidXMLError('Missing tag: "ListBucketResult"')
  }
  xmlobj = xmlobj.ListBucketResult
  if (xmlobj.IsTruncated) {
    result.isTruncated = xmlobj.IsTruncated
  }
  if (xmlobj.NextContinuationToken) {
    result.nextContinuationToken = xmlobj.NextContinuationToken
  }
  if (xmlobj.Contents) {
    // each <Contents> entry describes one object key
    toArray(xmlobj.Contents).forEach((content) => {
      const name = sanitizeObjectKey(content.Key)
      const lastModified = new Date(content.LastModified)
      const etag = sanitizeETag(content.ETag)
      const size = content.Size
      let tags: Tags = {}
      // UserTags is a URL-query-style string, e.g. "k1=v1&k2=v2"
      if (content.UserTags != null) {
        toArray(content.UserTags.split('&')).forEach((tag) => {
          const [key, value] = tag.split('=')
          tags[key] = value
        })
      } else {
        tags = {}
      }
      let metadata
      if (content.UserMetadata != null) {
        // parser may yield an array for repeated tags; only the first entry is used
        metadata = toArray(content.UserMetadata)[0]
      } else {
        metadata = null
      }
      result.objects.push({ name, lastModified, etag, size, metadata, tags })
    })
  }
  if (xmlobj.CommonPrefixes) {
    // prefixes (when a delimiter is used) are reported as zero-size entries
    toArray(xmlobj.CommonPrefixes).forEach((commonPrefix) => {
      result.objects.push({ prefix: sanitizeObjectKey(toArray(commonPrefix.Prefix)[0]), size: 0 })
    })
  }
  return result
}
/** One already-uploaded part of an in-progress multipart upload. */
export type UploadedPart = {
  part: number
  lastModified?: Date
  etag: string
  size: number
}
// parse XML response for list parts of an in progress multipart upload
export function parseListParts(xml: string): {
isTruncated: boolean
marker: number
parts: UploadedPart[]
} {
let xmlobj = parseXml(xml)
const result: {
isTruncated: boolean
marker: number
parts: UploadedPart[]
} = {
isTruncated: false,
parts: [],
marker: 0,
}
if (!xmlobj.ListPartsResult) {
throw new errors.InvalidXMLError('Missing tag: "ListPartsResult"')
}
xmlobj = xmlobj.ListPartsResult
if (xmlobj.IsTruncated) {
result.isTruncated = xmlobj.IsTruncated
}
if (xmlobj.NextPartNumberMarker) {
result.marker = toArray(xmlobj.NextPartNumberMarker)[0] || ''
}
if (xmlobj.Part) {
toArray(xmlobj.Part).forEach((p) => {
const part = parseInt(toArray(p.PartNumber)[0], 10)
const lastModified = new Date(p.LastModified)
const etag = p.ETag.replace(/^"/g, '')
.replace(/"$/g, '')
.replace(/^&quot;/g, '')
.replace(/&quot;$/g, '')
.replace(/^&#34;/g, '')
.replace(/&#34;$/g, '')
result.parts.push({ part, lastModified, etag, size: parseInt(p.Size, 10) })
})
}
return result
}
/**
 * Parse the ListAllMyBuckets XML response into {name, creationDate} entries.
 * Uses a dedicated parser configuration so that all-digit bucket names are
 * kept as strings instead of being coerced to numbers.
 */
export function parseListBucket(xml: string): BucketItemFromList[] {
  let result: BucketItemFromList[] = []
  const listBucketResultParser = new XMLParser({
    parseTagValue: true, // Enable parsing of values
    numberParseOptions: {
      leadingZeros: false, // Disable number parsing for values with leading zeros
      hex: false, // Disable hex number parsing - Invalid bucket name
      skipLike: /^[0-9]+$/, // Skip number parsing if the value consists entirely of digits
    },
    tagValueProcessor: (tagName, tagValue = '') => {
      // Ensure that the Name tag is always treated as a string
      if (tagName === 'Name') {
        return tagValue.toString()
      }
      return tagValue
    },
    ignoreAttributes: false, // Ensure that all attributes are parsed
  })
  const parsedXmlRes = listBucketResultParser.parse(xml)
  if (!parsedXmlRes.ListAllMyBucketsResult) {
    throw new errors.InvalidXMLError('Missing tag: "ListAllMyBucketsResult"')
  }
  const { ListAllMyBucketsResult: { Buckets = {} } = {} } = parsedXmlRes
  if (Buckets.Bucket) {
    // single bucket parses as an object; toArray normalizes to an array
    result = toArray(Buckets.Bucket).map((bucket = {}) => {
      const { Name: bucketName, CreationDate } = bucket
      const creationDate = new Date(CreationDate)
      return { name: bucketName, creationDate }
    })
  }
  return result
}
/**
 * Extract the UploadId from an InitiateMultipartUpload XML response.
 * @throws InvalidXMLError when the expected tags are missing
 */
export function parseInitiateMultipart(xml: string): string {
  const parsed = parseXml(xml)
  const uploadResult = parsed.InitiateMultipartUploadResult
  if (!uploadResult) {
    throw new errors.InvalidXMLError('Missing tag: "InitiateMultipartUploadResult"')
  }
  if (!uploadResult.UploadId) {
    throw new errors.InvalidXMLError('Missing tag: "UploadId"')
  }
  return uploadResult.UploadId
}
/** Parse a bucket replication configuration XML response. */
export function parseReplicationConfig(xml: string): ReplicationConfig {
  const { Role, Rule } = parseXml(xml).ReplicationConfiguration
  return {
    ReplicationConfiguration: {
      role: Role,
      // a single rule parses as an object; normalize to an array
      rules: toArray(Rule),
    },
  }
}
/** Parse an object legal-hold XML response and return its LegalHold node. */
export function parseObjectLegalHoldConfig(xml: string) {
  return parseXml(xml).LegalHold
}
/** Parse a GetTagging XML response; always returns an array of tags. */
export function parseTagging(xml: string) {
  const xmlObj = parseXml(xml)
  const tagEntry = xmlObj.Tagging && xmlObj.Tagging.TagSet && xmlObj.Tagging.TagSet.Tag
  if (!tagEntry) {
    return []
  }
  // a single tag parses as a plain object — wrap it so callers always get an array
  return isObject(tagEntry) ? [tagEntry] : tagEntry
}
// parse XML response when a multipart upload is completed
export function parseCompleteMultipart(xml: string) {
const xmlobj = parseXml(xml).CompleteMultipartUploadResult
if (xmlobj.Location) {
const location = toArray(xmlobj.Location)[0]
const bucket = toArray(xmlobj.Bucket)[0]
const key = xmlobj.Key
const etag = xmlobj.ETag.replace(/^"/g, '')
.replace(/"$/g, '')
.replace(/^&quot;/g, '')
.replace(/&quot;$/g, '')
.replace(/^&#34;/g, '')
.replace(/&#34;$/g, '')
return { location, bucket, key, etag }
}
// Complete Multipart can return XML Error after a 200 OK response
if (xmlobj.Code && xmlobj.Message) {
const errCode = toArray(xmlobj.Code)[0]
const errMessage = toArray(xmlobj.Message)[0]
return { errCode, errMessage }
}
}
type UploadID = string
/** Result shape of parseListMultipart (in-progress multipart uploads). */
export type ListMultipartResult = {
  uploads: {
    key: string
    uploadId: UploadID
    initiator?: { id: string; displayName: string }
    owner?: { id: string; displayName: string }
    storageClass: unknown
    initiated: Date
  }[]
  prefixes: {
    prefix: string
  }[]
  isTruncated: boolean
  nextKeyMarker: string
  nextUploadIdMarker: string
}
// parse XML response for listing in-progress multipart uploads
export function parseListMultipart(xml: string): ListMultipartResult {
const result: ListMultipartResult = {
prefixes: [],
uploads: [],
isTruncated: false,
nextKeyMarker: '',
nextUploadIdMarker: '',
}
let xmlobj = parseXml(xml)
if (!xmlobj.ListMultipartUploadsResult) {
throw new errors.InvalidXMLError('Missing tag: "ListMultipartUploadsResult"')
}
xmlobj = xmlobj.ListMultipartUploadsResult
if (xmlobj.IsTruncated) {
result.isTruncated = xmlobj.IsTruncated
}
if (xmlobj.NextKeyMarker) {
result.nextKeyMarker = xmlobj.NextKeyMarker
}
if (xmlobj.NextUploadIdMarker) {
result.nextUploadIdMarker = xmlobj.nextUploadIdMarker || ''
}
if (xmlobj.CommonPrefixes) {
toArray(xmlobj.CommonPrefixes).forEach((prefix) => {
// @ts-expect-error index check
result.prefixes.push({ prefix: sanitizeObjectKey(toArray<string>(prefix.Prefix)[0]) })
})
}
if (xmlobj.Upload) {
toArray(xmlobj.Upload).forEach((upload) => {
const uploadItem: ListMultipartResult['uploads'][number] = {
key: upload.Key,
uploadId: upload.UploadId,
storageClass: upload.StorageClass,
initiated: new Date(upload.Initiated),
}
if (upload.Initiator) {
uploadItem.initiator = { id: upload.Initiator.ID, displayName: upload.Initiator.DisplayName }
}
if (upload.Owner) {
uploadItem.owner = { id: upload.Owner.ID, displayName: upload.Owner.DisplayName }
}
result.uploads.push(uploadItem)
})
}
return result
}
/**
 * Parse a GetObjectLockConfiguration XML response.
 * Returns an empty object when locking is not configured; otherwise the
 * enabled flag plus (when a default retention rule exists) mode, validity
 * and unit, preferring Years over Days.
 */
export function parseObjectLockConfig(xml: string): ObjectLockInfo {
  const xmlObj = parseXml(xml)
  const lockConfiguration = xmlObj.ObjectLockConfiguration
  if (!lockConfiguration) {
    return {} as ObjectLockInfo
  }
  const lockInfo = {
    objectLockEnabled: lockConfiguration.ObjectLockEnabled,
  } as ObjectLockInfo
  const defaultRetention = lockConfiguration.Rule && lockConfiguration.Rule.DefaultRetention
  if (defaultRetention) {
    lockInfo.mode = defaultRetention.Mode
    if (defaultRetention.Years) {
      lockInfo.validity = defaultRetention.Years
      lockInfo.unit = RETENTION_VALIDITY_UNITS.YEARS
    } else {
      lockInfo.validity = defaultRetention.Days
      lockInfo.unit = RETENTION_VALIDITY_UNITS.DAYS
    }
  }
  return lockInfo
}
/** Parse a GetBucketVersioning XML response into its configuration node. */
export function parseBucketVersioningConfig(xml: string) {
  return parseXml(xml).VersioningConfiguration
}
// Used only in selectObjectContent API.
// extractHeaderType extracts the first half of the header message, the header type.
// extractHeaderType extracts the first half of the header message, the header type.
// Event-stream header names look like ":event-type"; the part after the
// leading ':' is returned. Reads 1 length byte, then that many name bytes.
function extractHeaderType(stream: stream.Readable): string | undefined {
  const headerNameLen = Buffer.from(stream.read(1)).readUInt8()
  const headerNameWithSeparator = Buffer.from(stream.read(headerNameLen)).toString()
  const splitBySeparator = (headerNameWithSeparator || '').split(':')
  // FIX: was `>= 1`, which is always true for a split() result, making the
  // '' fallback dead code and letting `undefined` escape when no ':' exists.
  return splitBySeparator.length >= 2 ? splitBySeparator[1] : ''
}
// Reads one event-stream header value: a 2-byte big-endian length followed
// by that many bytes of UTF-8 text.
function extractHeaderValue(readable: stream.Readable) {
  const valueLength = Buffer.from(readable.read(2)).readUInt16BE()
  return Buffer.from(readable.read(valueLength)).toString()
}
/**
 * Decode an S3 Select (selectObjectContent) event-stream response buffer.
 * Each message is: 4-byte total length, 4-byte header length, 4-byte prelude
 * CRC, headers, payload, 4-byte message CRC. Both CRCs are verified; Records,
 * Progress and Stats payloads are accumulated into a SelectResults, and the
 * 'End' event returns it.
 */
export function parseSelectObjectContentResponse(res: Buffer) {
  const selectResults = new SelectResults({}) // will be returned
  const responseStream = readableStream(res) // convert byte array to a readable responseStream
  // @ts-ignore
  while (responseStream._readableState.length) {
    // Top level responseStream read tracker.
    let msgCrcAccumulator // accumulate from start of the message till the message crc start.
    const totalByteLengthBuffer = Buffer.from(responseStream.read(4))
    msgCrcAccumulator = crc32(totalByteLengthBuffer)
    const headerBytesBuffer = Buffer.from(responseStream.read(4))
    msgCrcAccumulator = crc32(headerBytesBuffer, msgCrcAccumulator)
    const calculatedPreludeCrc = msgCrcAccumulator.readInt32BE() // use it to check if any CRC mismatch in header itself.
    const preludeCrcBuffer = Buffer.from(responseStream.read(4)) // read 4 bytes i.e 4+4 =8 + 4 = 12 ( prelude + prelude crc)
    msgCrcAccumulator = crc32(preludeCrcBuffer, msgCrcAccumulator)
    const totalMsgLength = totalByteLengthBuffer.readInt32BE()
    const headerLength = headerBytesBuffer.readInt32BE()
    const preludeCrcByteValue = preludeCrcBuffer.readInt32BE()
    if (preludeCrcByteValue !== calculatedPreludeCrc) {
      // Handle Header CRC mismatch Error
      throw new Error(
        `Header Checksum Mismatch, Prelude CRC of ${preludeCrcByteValue} does not equal expected CRC of ${calculatedPreludeCrc}`,
      )
    }
    const headers: Record<string, unknown> = {}
    if (headerLength > 0) {
      const headerBytes = Buffer.from(responseStream.read(headerLength))
      msgCrcAccumulator = crc32(headerBytes, msgCrcAccumulator)
      const headerReaderStream = readableStream(headerBytes)
      // @ts-ignore
      while (headerReaderStream._readableState.length) {
        const headerTypeName = extractHeaderType(headerReaderStream)
        headerReaderStream.read(1) // just read and ignore it.
        if (headerTypeName) {
          headers[headerTypeName] = extractHeaderValue(headerReaderStream)
        }
      }
    }
    let payloadStream
    // 16 = prelude (8) + prelude CRC (4) + message CRC (4)
    const payLoadLength = totalMsgLength - headerLength - 16
    if (payLoadLength > 0) {
      const payLoadBuffer = Buffer.from(responseStream.read(payLoadLength))
      msgCrcAccumulator = crc32(payLoadBuffer, msgCrcAccumulator)
      // read the checksum early and detect any mismatch so we can avoid unnecessary further processing.
      const messageCrcByteValue = Buffer.from(responseStream.read(4)).readInt32BE()
      const calculatedCrc = msgCrcAccumulator.readInt32BE()
      // Handle message CRC Error
      if (messageCrcByteValue !== calculatedCrc) {
        throw new Error(
          `Message Checksum Mismatch, Message CRC of ${messageCrcByteValue} does not equal expected CRC of ${calculatedCrc}`,
        )
      }
      payloadStream = readableStream(payLoadBuffer)
    }
    const messageType = headers['message-type']
    switch (messageType) {
      case 'error': {
        const errorMessage = headers['error-code'] + ':"' + headers['error-message'] + '"'
        throw new Error(errorMessage)
      }
      case 'event': {
        const contentType = headers['content-type']
        const eventType = headers['event-type']
        switch (eventType) {
          case 'End': {
            selectResults.setResponse(res)
            return selectResults
          }
          case 'Records': {
            const readData = payloadStream?.read(payLoadLength)
            selectResults.setRecords(readData)
            break
          }
          case 'Progress':
            {
              switch (contentType) {
                case 'text/xml': {
                  const progressData = payloadStream?.read(payLoadLength)
                  selectResults.setProgress(progressData.toString())
                  break
                }
                default: {
                  const errorMessage = `Unexpected content-type ${contentType} sent for event-type Progress`
                  throw new Error(errorMessage)
                }
              }
            }
            break
          case 'Stats':
            {
              switch (contentType) {
                case 'text/xml': {
                  const statsData = payloadStream?.read(payLoadLength)
                  selectResults.setStats(statsData.toString())
                  break
                }
                default: {
                  const errorMessage = `Unexpected content-type ${contentType} sent for event-type Stats`
                  throw new Error(errorMessage)
                }
              }
            }
            break
          default: {
            // Continuation message: Not sure if it is supported. did not find a reference or any message in response.
            // It does not have a payload.
            // NOTE(review): this interpolates messageType (always 'event' here);
            // eventType was probably intended — confirm before changing.
            const warningMessage = `Un implemented event detected ${messageType}.`
            // eslint-disable-next-line no-console
            console.warn(warningMessage)
          }
        }
      }
    }
  }
}
/** Parse a GetBucketLifecycle XML response into its configuration node. */
export function parseLifecycleConfig(xml: string) {
  return parseXml(xml).LifecycleConfiguration
}
/** Parse a GetBucketEncryption XML response; returns the full parsed document. */
export function parseBucketEncryptionConfig(xml: string) {
  const xmlObj = parseXml(xml)
  return xmlObj
}
/** Parse a GetObjectRetention XML response into {mode, retainUntilDate}. */
export function parseObjectRetentionConfig(xml: string) {
  const { Mode, RetainUntilDate } = parseXml(xml).Retention
  return {
    mode: Mode,
    retainUntilDate: RetainUntilDate,
  }
}
/** Parse a DeleteObjects XML response; returns per-object errors (always as an array). */
export function removeObjectsParser(xml: string) {
  const xmlObj = parseXml(xml)
  const deleteErrors = xmlObj.DeleteResult && xmlObj.DeleteResult.Error
  if (deleteErrors) {
    // single-object responses parse the error as a plain object — normalize
    return toArray(deleteErrors)
  }
  return []
}
// parse XML response for copy object
// parse XML response for copy object
// Returns the new object's etag and lastModified; empty string when absent.
export function parseCopyObject(xml: string): CopyObjectResultV1 {
  const result: CopyObjectResultV1 = {
    etag: '',
    lastModified: '',
  }
  let xmlobj = parseXml(xml)
  if (!xmlobj.CopyObjectResult) {
    throw new errors.InvalidXMLError('Missing tag: "CopyObjectResult"')
  }
  xmlobj = xmlobj.CopyObjectResult
  if (xmlobj.ETag) {
    // strip surrounding quotes in raw or XML-entity-encoded form
    result.etag = xmlobj.ETag.replace(/^"/g, '')
      .replace(/"$/g, '')
      .replace(/^&quot;/g, '')
      .replace(/&quot;$/g, '')
      .replace(/^&#34;/g, '')
      .replace(/&#34;$/g, '')
  }
  if (xmlobj.LastModified) {
    result.lastModified = new Date(xmlobj.LastModified)
  }
  return result
}
// Normalize one raw XML list row (Version or DeleteMarker entry) into the
// ObjectInfo shape used by listObjects results.
const formatObjInfo = (content: ObjectRowEntry, opts: { IsDeleteMarker?: boolean } = {}) => {
  const { Key, LastModified, ETag, Size, VersionId, IsLatest } = content
  if (!isObject(opts)) {
    opts = {}
  }
  const name = sanitizeObjectKey(toArray(Key)[0] || '')
  const lastModified = LastModified ? new Date(toArray(LastModified)[0] || '') : undefined
  const etag = sanitizeETag(toArray(ETag)[0] || '')
  const size = sanitizeSize(Size || '')
  return {
    name,
    lastModified,
    etag,
    size,
    versionId: VersionId,
    isLatest: IsLatest,
    // delete-marker rows are tagged so callers can filter them out
    isDeleteMarker: opts.IsDeleteMarker ? opts.IsDeleteMarker : false,
  }
}
// parse XML response for list objects in a bucket
export function parseListObjects(xml: string) {
const result: { objects: ObjectInfo[]; isTruncated?: boolean; nextMarker?: string; versionIdMarker?: string } = {
objects: [],
isTruncated: false,
nextMarker: undefined,
versionIdMarker: undefined,
}
let isTruncated = false
let nextMarker, nextVersionKeyMarker
const xmlobj = fxpWithoutNumParser.parse(xml)
const parseCommonPrefixesEntity = (commonPrefixEntry: CommonPrefix[]) => {
if (commonPrefixEntry) {
toArray(commonPrefixEntry).forEach((commonPrefix) => {
result.objects.push({ prefix: sanitizeObjectKey(toArray(commonPrefix.Prefix)[0] || ''), size: 0 })
})
}
}
const listBucketResult: ListBucketResultV1 = xmlobj.ListBucketResult
const listVersionsResult: ListBucketResultV1 = xmlobj.ListVersionsResult
if (listBucketResult) {
if (listBucketResult.IsTruncated) {
isTruncated = listBucketResult.IsTruncated
}
if (listBucketResult.Contents) {
toArray(listBucketResult.Contents).forEach((content) => {
const name = sanitizeObjectKey(toArray(content.Key)[0] || '')
const lastModified = new Date(toArray(content.LastModified)[0] || '')
const etag = sanitizeETag(toArray(content.ETag)[0] || '')
const size = sanitizeSize(content.Size || '')
result.objects.push({ name, lastModified, etag, size })
})
}
if (listBucketResult.Marker) {
nextMarker = listBucketResult.Marker
} else if (isTruncated && result.objects.length > 0) {
nextMarker = result.objects[result.objects.length - 1]?.name
}
if (listBucketResult.CommonPrefixes) {
parseCommonPrefixesEntity(listBucketResult.CommonPrefixes)
}
}
if (listVersionsResult) {
if (listVersionsResult.IsTruncated) {
isTruncated = listVersionsResult.IsTruncated
}
if (listVersionsResult.Version) {
toArray(listVersionsResult.Version).forEach((content) => {
result.objects.push(formatObjInfo(content))
})
}
if (listVersionsResult.DeleteMarker) {
toArray(listVersionsResult.DeleteMarker).forEach((content) => {
result.objects.push(formatObjInfo(content, { IsDeleteMarker: true }))
})
}
if (listVersionsResult.NextKeyMarker) {
nextVersionKeyMarker = listVersionsResult.NextKeyMarker
}
if (listVersionsResult.NextVersionIdMarker) {
result.versionIdMarker = listVersionsResult.NextVersionIdMarker
}
if (listVersionsResult.CommonPrefixes) {
parseCommonPrefixesEntity(listVersionsResult.CommonPrefixes)
}
}
result.isTruncated = isTruncated
if (isTruncated) {
result.nextMarker = nextVersionKeyMarker || nextMarker
}
return result
}
/** Parse an UploadPartCopy XML response into its CopyPartResult node. */
export function uploadPartParser(xml: string) {
  return parseXml(xml).CopyPartResult
}
+162
View File
@@ -0,0 +1,162 @@
// imported from https://github.com/DefinitelyTyped/DefinitelyTyped/blob/93cfb0ec069731dcdfc31464788613f7cddb8192/types/minio/index.d.ts
/* eslint-disable @typescript-eslint/no-explicit-any */
import type { LEGAL_HOLD_STATUS, RETENTION_MODES, RETENTION_VALIDITY_UNITS } from './helpers.ts'
import type { ClientOptions, NoResultCallback, RemoveOptions } from './internal/client.ts'
import { TypedClient } from './internal/client.ts'
import { CopyConditions } from './internal/copy-conditions.ts'
import { PostPolicy } from './internal/post-policy.ts'
import type {
BucketItem,
BucketItemCopy,
BucketItemFromList,
BucketItemStat,
BucketItemWithMetadata,
BucketStream,
EmptyObject,
ExistingObjectReplication,
GetObjectLegalHoldOptions,
IncompleteUploadedBucketItem,
InputSerialization,
IsoDate,
ItemBucketMetadata,
ItemBucketMetadataList,
LegalHoldStatus,
LifecycleConfig,
LifecycleRule,
MetadataItem,
ObjectLockInfo,
OutputSerialization,
PutObjectLegalHoldOptions,
ReplicaModifications,
ReplicationConfig,
ReplicationConfigOpts,
ReplicationRule,
ReplicationRuleAnd,
ReplicationRuleDestination,
ReplicationRuleFilter,
ReplicationRuleStatus,
ResultCallback,
Retention,
RetentionOptions,
ScanRange,
SelectOptions,
SelectProgress,
SourceSelectionCriteria,
Tag,
} from './internal/type.ts'
import type { NotificationConfig, NotificationEvent, NotificationPoller } from './notification.ts'
export * from './errors.ts'
export * from './helpers.ts'
export type { Region } from './internal/s3-endpoints.ts'
export type * from './notification.ts'
export * from './notification.ts'
export { CopyConditions, PostPolicy }
export type { MakeBucketOpt } from './internal/client.ts'
export type {
BucketItem,
BucketItemCopy,
BucketItemFromList,
BucketItemStat,
BucketItemWithMetadata,
BucketStream,
ClientOptions,
EmptyObject,
ExistingObjectReplication,
GetObjectLegalHoldOptions,
IncompleteUploadedBucketItem,
InputSerialization,
IsoDate,
ItemBucketMetadata,
ItemBucketMetadataList,
LegalHoldStatus,
LifecycleConfig,
LifecycleRule,
MetadataItem,
NoResultCallback,
ObjectLockInfo,
OutputSerialization,
PutObjectLegalHoldOptions,
RemoveOptions,
ReplicaModifications,
ReplicationConfig,
ReplicationConfigOpts,
ReplicationRule,
ReplicationRuleAnd,
ReplicationRuleDestination,
ReplicationRuleFilter,
ReplicationRuleStatus,
Retention,
RetentionOptions,
ScanRange,
SelectOptions,
SelectProgress,
SourceSelectionCriteria,
Tag,
}
/**
 * @deprecated keep for backward compatible, use `RETENTION_MODES` instead
 */
export type Mode = RETENTION_MODES
/**
 * @deprecated keep for backward compatible
 */
export type LockUnit = RETENTION_VALIDITY_UNITS
export type VersioningConfig = Record<string | number | symbol, unknown>
export type TagList = Record<string, string>
/** Result of presignedPostPolicy: the POST URL plus the form fields to submit. */
export interface PostPolicyResult {
  postURL: string
  formData: {
    [key: string]: any
  }
}
export interface LockConfig {
  mode: RETENTION_MODES
  unit: RETENTION_VALIDITY_UNITS
  validity: number
}
export interface LegalHoldOptions {
  versionId: string
  status: LEGAL_HOLD_STATUS
}
export interface SourceObjectStats {
  size: number
  metaData: string
  // NOTE(review): "lastModicied" is a long-standing typo for "lastModified";
  // kept as-is because renaming a public field would break existing callers.
  lastModicied: Date
  versionId: string
  etag: string
}
// Exports from library
/**
 * Declaration-only surface of the MinIO client: the JS implementation lives
 * elsewhere; these signatures add the typed bucket-notification API and
 * listObjectsV2 on top of TypedClient. Each operation is offered in both
 * callback and Promise form.
 */
export class Client extends TypedClient {
  listObjectsV2(bucketName: string, prefix?: string, recursive?: boolean, startAfter?: string): BucketStream<BucketItem>
  // Bucket Policy & Notification operations
  getBucketNotification(bucketName: string, callback: ResultCallback<NotificationConfig>): void
  getBucketNotification(bucketName: string): Promise<NotificationConfig>
  setBucketNotification(
    bucketName: string,
    bucketNotificationConfig: NotificationConfig,
    callback: NoResultCallback,
  ): void
  setBucketNotification(bucketName: string, bucketNotificationConfig: NotificationConfig): Promise<void>
  removeAllBucketNotification(bucketName: string, callback: NoResultCallback): void
  removeAllBucketNotification(bucketName: string): Promise<void>
  // long-poll listener for bucket events; returns a poller that emits notifications
  listenBucketNotification(
    bucketName: string,
    prefix: string,
    suffix: string,
    events: NotificationEvent[],
  ): NotificationPoller
}
+316
View File
@@ -0,0 +1,316 @@
/*
* MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as Stream from 'node:stream'
import xml2js from 'xml2js'
import * as errors from './errors.ts'
import { callbackify } from './internal/callbackify.js'
import { TypedClient } from './internal/client.ts'
import { CopyConditions } from './internal/copy-conditions.ts'
import {
isBoolean,
isFunction,
isNumber,
isObject,
isString,
isValidBucketName,
isValidPrefix,
pipesetup,
uriEscape,
} from './internal/helper.ts'
import { PostPolicy } from './internal/post-policy.ts'
import { NotificationConfig, NotificationPoller } from './notification.ts'
import { promisify } from './promisify.js'
import * as transformers from './transformers.js'
export * from './errors.ts'
export * from './helpers.ts'
export * from './notification.ts'
export { CopyConditions, PostPolicy }
export class Client extends TypedClient {
  // listObjectsV2Query - (List Objects V2) - List some or all (up to 1000) of the objects in a bucket.
  //
  // You can use the request parameters as selection criteria to return a subset of the objects in a bucket.
  // request parameters :-
  // * `bucketName` _string_: name of the bucket
  // * `prefix` _string_: Limits the response to keys that begin with the specified prefix.
  // * `continuation-token` _string_: Used to continue iterating over a set of objects.
  // * `delimiter` _string_: A delimiter is a character you use to group keys.
  // * `max-keys` _number_: Sets the maximum number of keys returned in the response body.
  // * `start-after` _string_: Specifies the key to start after when listing objects in a bucket.
  //
  // Returns a transformer stream that emits the parsed listing result.
  listObjectsV2Query(bucketName, prefix, continuationToken, delimiter, maxKeys, startAfter) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isString(prefix)) {
      throw new TypeError('prefix should be of type "string"')
    }
    if (!isString(continuationToken)) {
      throw new TypeError('continuationToken should be of type "string"')
    }
    if (!isString(delimiter)) {
      throw new TypeError('delimiter should be of type "string"')
    }
    if (!isNumber(maxKeys)) {
      throw new TypeError('maxKeys should be of type "number"')
    }
    if (!isString(startAfter)) {
      throw new TypeError('startAfter should be of type "string"')
    }
    var queries = []
    // Call for listing objects v2 API
    queries.push(`list-type=2`)
    queries.push(`encoding-type=url`)
    // escape every value in query string, except maxKeys
    queries.push(`prefix=${uriEscape(prefix)}`)
    queries.push(`delimiter=${uriEscape(delimiter)}`)
    if (continuationToken) {
      continuationToken = uriEscape(continuationToken)
      queries.push(`continuation-token=${continuationToken}`)
    }
    // Set start-after
    if (startAfter) {
      startAfter = uriEscape(startAfter)
      queries.push(`start-after=${startAfter}`)
    }
    // no need to escape maxKeys
    if (maxKeys) {
      // S3 caps the page size at 1000 keys per request
      if (maxKeys >= 1000) {
        maxKeys = 1000
      }
      queries.push(`max-keys=${maxKeys}`)
    }
    // sorted query string, matching the canonical form used for signing
    queries.sort()
    var query = ''
    if (queries.length > 0) {
      query = `${queries.join('&')}`
    }
    var method = 'GET'
    var transformer = transformers.getListObjectsV2Transformer()
    this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => {
      if (e) {
        return transformer.emit('error', e)
      }
      pipesetup(response, transformer)
    })
    return transformer
  }
  // List the objects in the bucket using S3 ListObjects V2
  //
  // __Arguments__
  // * `bucketName` _string_: name of the bucket
  // * `prefix` _string_: the prefix of the objects that should be listed (optional, default `''`)
  // * `recursive` _bool_: `true` indicates recursive style listing and `false` indicates directory style listing delimited by '/'. (optional, default `false`)
  // * `startAfter` _string_: Specifies the key to start after when listing objects in a bucket. (optional, default `''`)
  //
  // __Return Value__
  // * `stream` _Stream_: stream emitting the objects in the bucket, the object is of the format:
  // * `obj.name` _string_: name of the object
  // * `obj.prefix` _string_: name of the object prefix
  // * `obj.size` _number_: size of the object
  // * `obj.etag` _string_: etag of the object
  // * `obj.lastModified` _Date_: modified time stamp
  listObjectsV2(bucketName, prefix, recursive, startAfter) {
    if (prefix === undefined) {
      prefix = ''
    }
    if (recursive === undefined) {
      recursive = false
    }
    if (startAfter === undefined) {
      startAfter = ''
    }
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isValidPrefix(prefix)) {
      throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`)
    }
    if (!isString(prefix)) {
      throw new TypeError('prefix should be of type "string"')
    }
    if (!isBoolean(recursive)) {
      throw new TypeError('recursive should be of type "boolean"')
    }
    if (!isString(startAfter)) {
      throw new TypeError('startAfter should be of type "string"')
    }
    // if recursive is false set delimiter to '/'
    var delimiter = recursive ? '' : '/'
    var continuationToken = ''
    var objects = []
    var ended = false
    // lazily paginate: each _read() drains the buffered batch before fetching more
    var readStream = Stream.Readable({ objectMode: true })
    readStream._read = () => {
      // push one object per _read()
      if (objects.length) {
        readStream.push(objects.shift())
        return
      }
      if (ended) {
        return readStream.push(null)
      }
      // if there are no objects to push do query for the next batch of objects
      this.listObjectsV2Query(bucketName, prefix, continuationToken, delimiter, 1000, startAfter)
        .on('error', (e) => readStream.emit('error', e))
        .on('data', (result) => {
          if (result.isTruncated) {
            continuationToken = result.nextContinuationToken
          } else {
            ended = true
          }
          objects = result.objects
          // re-enter _read() to start pushing the freshly fetched batch
          readStream._read()
        })
    }
    return readStream
  }
  // Stores the given notification configuration on the bucket
  // (replaces any existing configuration).
  setBucketNotification(bucketName, config, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isObject(config)) {
      throw new TypeError('notification config should be of type "Object"')
    }
    if (!isFunction(cb)) {
      throw new TypeError('callback should be of type "function"')
    }
    var method = 'PUT'
    var query = 'notification'
    var builder = new xml2js.Builder({
      rootName: 'NotificationConfiguration',
      renderOpts: { pretty: false },
      headless: true,
    })
    var payload = builder.buildObject(config)
    this.makeRequest({ method, bucketName, query }, payload, [200], '', false, cb)
  }
  // Remove all the notification configurations in the S3 provider
  // by uploading an empty NotificationConfig.
  removeAllBucketNotification(bucketName, cb) {
    this.setBucketNotification(bucketName, new NotificationConfig(), cb)
  }
  // Return the list of notification configurations stored
  // in the S3 provider
  getBucketNotification(bucketName, cb) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isFunction(cb)) {
      throw new TypeError('callback should be of type "function"')
    }
    var method = 'GET'
    var query = 'notification'
    this.makeRequest({ method, bucketName, query }, '', [200], '', true, (e, response) => {
      if (e) {
        return cb(e)
      }
      var transformer = transformers.getBucketNotificationTransformer()
      var bucketNotification
      pipesetup(response, transformer)
        .on('data', (result) => (bucketNotification = result))
        .on('error', (e) => cb(e))
        .on('end', () => cb(null, bucketNotification))
    })
  }
  // Listens for bucket notifications. Returns an EventEmitter.
  listenBucketNotification(bucketName, prefix, suffix, events) {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
    }
    if (!isString(prefix)) {
      throw new TypeError('prefix must be of type string')
    }
    if (!isString(suffix)) {
      throw new TypeError('suffix must be of type string')
    }
    if (!Array.isArray(events)) {
      throw new TypeError('events must be of type Array')
    }
    let listener = new NotificationPoller(this, bucketName, prefix, suffix, events)
    listener.start()
    return listener
  }
}
// The notification APIs above are callback-first; wrap them so the callback
// becomes optional and a Promise is returned when it is omitted.
for (const method of ['getBucketNotification', 'setBucketNotification', 'removeAllBucketNotification']) {
  Client.prototype[method] = promisify(Client.prototype[method])
}
// refactored API use promise internally — wrap them so each still accepts an
// optional trailing callback for backwards compatibility.
for (const method of [
  'makeBucket',
  'bucketExists',
  'removeBucket',
  'listBuckets',
  'getObject',
  'fGetObject',
  'getPartialObject',
  'statObject',
  'putObjectRetention',
  'putObject',
  'fPutObject',
  'removeObject',
  'removeBucketReplication',
  'setBucketReplication',
  'getBucketReplication',
  'getObjectLegalHold',
  'setObjectLegalHold',
  'setObjectLockConfig',
  'getObjectLockConfig',
  'getBucketPolicy',
  'setBucketPolicy',
  'getBucketTagging',
  'getObjectTagging',
  'setBucketTagging',
  'removeBucketTagging',
  'setObjectTagging',
  'removeObjectTagging',
  'getBucketVersioning',
  'setBucketVersioning',
  'selectObjectContent',
  'setBucketLifecycle',
  'getBucketLifecycle',
  'removeBucketLifecycle',
  'setBucketEncryption',
  'getBucketEncryption',
  'removeBucketEncryption',
  'getObjectRetention',
  'removeObjects',
  'removeIncompleteUpload',
  'copyObject',
  'composeObject',
  'presignedUrl',
  'presignedGetObject',
  'presignedPutObject',
  'presignedPostPolicy',
]) {
  Client.prototype[method] = callbackify(Client.prototype[method])
}
+254
View File
@@ -0,0 +1,254 @@
/*
* MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2016 MinIO, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { EventEmitter } from 'eventemitter3'
import jsonLineParser from 'stream-json/jsonl/Parser.js'
import { DEFAULT_REGION } from './helpers.ts'
import type { TypedClient } from './internal/client.ts'
import { pipesetup, uriEscape } from './internal/helper.ts'
// TODO: type this
type Event = unknown
// Base class shared by the three supported target configs (Topic, Queue,
// CloudFunction). Accumulates an id, a list of events and S3-key filter rules
// in the shape the bucket-notification payload expects.
export class TargetConfig {
  private Filter?: { S3Key: { FilterRule: { Name: string; Value: string }[] } }
  private Event?: Event[]
  private Id: unknown
  // Sets the notification configuration id.
  setId(id: unknown) {
    this.Id = id
  }
  // Registers one more event this target should be notified about.
  addEvent(newevent: Event) {
    ;(this.Event ??= []).push(newevent)
  }
  // Restricts notifications to object keys ending with `suffix`.
  addFilterSuffix(suffix: string) {
    this.appendFilterRule('suffix', suffix)
  }
  // Restricts notifications to object keys starting with `prefix`.
  addFilterPrefix(prefix: string) {
    this.appendFilterRule('prefix', prefix)
  }
  // Lazily creates the filter container and appends one rule to it.
  private appendFilterRule(name: string, value: string) {
    ;(this.Filter ??= { S3Key: { FilterRule: [] } }).S3Key.FilterRule.push({ Name: name, Value: value })
  }
}
// 1. Topic (simple notification service)
// Target config that delivers notifications to the SNS topic named by `arn`.
export class TopicConfig extends TargetConfig {
  private Topic: string
  constructor(arn: string) {
    super()
    this.Topic = arn
  }
}
// 2. Queue (simple queue service)
// Target config that delivers notifications to the SQS queue named by `arn`.
export class QueueConfig extends TargetConfig {
  private Queue: string
  constructor(arn: string) {
    super()
    this.Queue = arn
  }
}
// 3. CloudFront (lambda function)
// Target config that delivers notifications to the cloud function named by `arn`.
export class CloudFunctionConfig extends TargetConfig {
  private CloudFunction: string
  constructor(arn: string) {
    super()
    this.CloudFunction = arn
  }
}
// Notification config - array of target configs.
// Target configs can be
// 1. Topic (simple notification service)
// 2. Queue (simple queue service)
// 3. CloudFront (lambda function)
export class NotificationConfig {
  private TopicConfiguration?: TargetConfig[]
  private CloudFunctionConfiguration?: TargetConfig[]
  private QueueConfiguration?: TargetConfig[]
  // Routes `target` into the list matching its concrete class; targets that
  // are none of the three known config types are silently ignored.
  add(target: TargetConfig) {
    if (target instanceof TopicConfig) {
      ;(this.TopicConfiguration ??= []).push(target)
    } else if (target instanceof QueueConfig) {
      ;(this.QueueConfiguration ??= []).push(target)
    } else if (target instanceof CloudFunctionConfig) {
      ;(this.CloudFunctionConfiguration ??= []).push(target)
    }
  }
}
// Assembles an ARN of the form arn:<partition>:<service>:<region>:<accountId>:<resource>.
export const buildARN = (partition: string, service: string, region: string, accountId: string, resource: string) => {
  return `arn:${partition}:${service}:${region}:${accountId}:${resource}`
}
// Canonical S3 event-name constants, usable wherever a NotificationEvent is
// expected (e.g. TargetConfig#addEvent, listenBucketNotification).
export const ObjectCreatedAll = 's3:ObjectCreated:*'
export const ObjectCreatedPut = 's3:ObjectCreated:Put'
export const ObjectCreatedPost = 's3:ObjectCreated:Post'
export const ObjectCreatedCopy = 's3:ObjectCreated:Copy'
export const ObjectCreatedCompleteMultipartUpload = 's3:ObjectCreated:CompleteMultipartUpload'
export const ObjectRemovedAll = 's3:ObjectRemoved:*'
export const ObjectRemovedDelete = 's3:ObjectRemoved:Delete'
export const ObjectRemovedDeleteMarkerCreated = 's3:ObjectRemoved:DeleteMarkerCreated'
export const ObjectReducedRedundancyLostObject = 's3:ReducedRedundancyLostObject'
// Known S3 event names; the trailing `| string` keeps the union open so
// server-specific events can still be passed while auto-complete works.
export type NotificationEvent =
  | 's3:ObjectCreated:*'
  | 's3:ObjectCreated:Put'
  | 's3:ObjectCreated:Post'
  | 's3:ObjectCreated:Copy'
  | 's3:ObjectCreated:CompleteMultipartUpload'
  | 's3:ObjectRemoved:*'
  | 's3:ObjectRemoved:Delete'
  | 's3:ObjectRemoved:DeleteMarkerCreated'
  | 's3:ReducedRedundancyLostObject'
  | 's3:TestEvent'
  | 's3:ObjectRestore:Post'
  | 's3:ObjectRestore:Completed'
  | 's3:Replication:OperationFailedReplication'
  | 's3:Replication:OperationMissedThreshold'
  | 's3:Replication:OperationReplicatedAfterThreshold'
  | 's3:Replication:OperationNotTracked'
  | string // put string at least so auto-complete could work
// TODO: type this
export type NotificationRecord = unknown
// Poll for notifications, used in #listenBucketNotification.
// Listening constitutes repeatedly requesting s3 whether or not any
// changes have occurred.
export class NotificationPoller extends EventEmitter<{
  notification: (event: NotificationRecord) => void
  error: (error: unknown) => void
}> {
  private client: TypedClient
  private bucketName: string
  private prefix: string
  private suffix: string
  private events: NotificationEvent[]
  // set by stop(); checked before each poll iteration to break the loop
  private ending: boolean
  constructor(client: TypedClient, bucketName: string, prefix: string, suffix: string, events: NotificationEvent[]) {
    super()
    this.client = client
    this.bucketName = bucketName
    this.prefix = prefix
    this.suffix = suffix
    this.events = events
    this.ending = false
  }
  // Starts the polling.
  start() {
    this.ending = false
    // defer so callers can attach listeners before the first request fires
    process.nextTick(() => {
      this.checkForChanges()
    })
  }
  // Stops the polling.
  stop() {
    this.ending = true
  }
  // One poll iteration: issues the listen request, emits each received
  // record, and reschedules itself when the response stream ends.
  checkForChanges() {
    // Don't continue if we're looping again but are cancelled.
    if (this.ending) {
      return
    }
    const method = 'GET'
    const queries = []
    if (this.prefix) {
      const prefix = uriEscape(this.prefix)
      queries.push(`prefix=${prefix}`)
    }
    if (this.suffix) {
      const suffix = uriEscape(this.suffix)
      queries.push(`suffix=${suffix}`)
    }
    if (this.events) {
      this.events.forEach((s3event) => queries.push('events=' + uriEscape(s3event)))
    }
    // sorted query string, matching the canonical form used for signing
    queries.sort()
    let query = ''
    if (queries.length > 0) {
      query = `${queries.join('&')}`
    }
    const region = this.client.region || DEFAULT_REGION
    this.client.makeRequestAsync({ method, bucketName: this.bucketName, query }, '', [200], region).then(
      (response) => {
        // parse the response as a stream of newline-delimited JSON documents
        const asm = jsonLineParser.make()
        pipesetup(response, asm)
          .on('data', (data) => {
            // Data is flushed periodically (every 5 seconds), so we should
            // handle it after flushing from the JSON parser.
            let records = data.value.Records
            // If null (= no records), change to an empty array.
            if (!records) {
              records = []
            }
            // Iterate over the notifications and emit them individually.
            records.forEach((record: NotificationRecord) => {
              this.emit('notification', record)
            })
            // If we're done, stop.
            if (this.ending) {
              response?.destroy()
            }
          })
          .on('error', (e) => this.emit('error', e))
          .on('end', () => {
            // Do it again, if we haven't cancelled yet.
            process.nextTick(() => {
              this.checkForChanges()
            })
          })
      },
      (e) => {
        return this.emit('error', e)
      },
    )
  }
}
+31
View File
@@ -0,0 +1,31 @@
// Wraps a node-style callback function so the callback becomes optional.
// When the last argument is a function the wrapped `fn` is invoked as-is;
// otherwise a Promise resolving/rejecting with the callback result is
// returned. `this` is preserved in both paths.
export function promisify(fn) {
  return function (...args) {
    const maybeCallback = args[args.length - 1]
    // Caller supplied a callback: delegate directly, no promise involved.
    if (typeof maybeCallback === 'function') {
      return fn.apply(this, args)
    }
    return new Promise((resolve, reject) => {
      // Append an adaptor callback that settles the promise.
      fn.call(this, ...args, (err, value) => {
        if (err) {
          reject(err)
        } else {
          resolve(value)
        }
      })
    })
  }
}
+325
View File
@@ -0,0 +1,325 @@
/*
* MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2016 MinIO, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as crypto from 'node:crypto'
import * as errors from './errors.ts'
import { PRESIGN_EXPIRY_DAYS_MAX } from './helpers.ts'
import { getScope, isNumber, isObject, isString, makeDateLong, makeDateShort, uriEscape } from './internal/helper.ts'
import type { ICanonicalRequest, IRequest, RequestHeaders } from './internal/type.ts'
const signV4Algorithm = 'AWS4-HMAC-SHA256'
// getCanonicalRequest builds the canonical request string of the form:
//
//   <HTTPMethod>\n
//   <CanonicalURI>\n
//   <CanonicalQueryString>\n
//   <CanonicalHeaders>\n
//   <SignedHeaders>\n
//   <HashedPayload>
//
function getCanonicalRequest(
  method: string,
  path: string,
  headers: RequestHeaders,
  signedHeaders: string[],
  hashedPayload: string,
): ICanonicalRequest {
  if (!isString(method)) {
    throw new TypeError('method should be of type "string"')
  }
  if (!isString(path)) {
    throw new TypeError('path should be of type "string"')
  }
  if (!isObject(headers)) {
    throw new TypeError('headers should be of type "object"')
  }
  if (!Array.isArray(signedHeaders)) {
    throw new TypeError('signedHeaders should be of type "array"')
  }
  if (!isString(hashedPayload)) {
    throw new TypeError('hashedPayload should be of type "string"')
  }
  // Lower-cased "name:value" lines; the V4 spec requires runs of spaces in
  // values to be collapsed to a single space.
  const canonicalHeaders = signedHeaders.map((name) => {
    const value = `${headers[name]}`.replace(/ +/g, ' ')
    return `${name.toLowerCase()}:${value}`
  })
  const [resource, rawQuery] = path.split('?')
  // Canonical query: parameters sorted, with bare keys normalized to "key=".
  let canonicalQuery = ''
  if (rawQuery) {
    canonicalQuery = rawQuery
      .split('&')
      .sort()
      .map((param) => (param.includes('=') ? param : param + '='))
      .join('&')
  }
  return [
    method.toUpperCase(),
    resource,
    canonicalQuery,
    canonicalHeaders.join('\n') + '\n',
    signedHeaders.join(';').toLowerCase(),
    hashedPayload,
  ].join('\n')
}
// Builds the V4 credential string: "<accessKey>/<date>/<region>/<service>/aws4_request".
// NOTE(review): requestDate is typed optional but isObject rejects undefined,
// so callers must in practice always supply it — confirm before relying on the type.
function getCredential(accessKey: string, region: string, requestDate?: Date, serviceName = 's3') {
  if (!isString(accessKey)) {
    throw new TypeError('accessKey should be of type "string"')
  }
  if (!isString(region)) {
    throw new TypeError('region should be of type "string"')
  }
  if (!isObject(requestDate)) {
    throw new TypeError('requestDate should be of type "object"')
  }
  return [accessKey, getScope(region, requestDate, serviceName)].join('/')
}
// Returns the alphabetically sorted list of header names that participate in
// signing. Excerpts from @lsegal —
// https://github.com/aws/aws-sdk-js/issues/659#issuecomment-120477258 —
// explain why some headers are excluded:
//
// * user-agent: proxies and other agents executing pre-signed URLs may
//   rewrite it, which would invalidate the signature.
// * content-length: a pre-signed PUT URL must not pin the payload size; for
//   regular requests the body checksum already covers length implicitly.
// * content-type: browsers normalize this header in various ways
//   (https://github.com/aws/aws-sdk-js/issues/244), so signing it is fragile.
// * authorization: skipped for obvious reasons.
function getSignedHeaders(headers: RequestHeaders): string[] {
  if (!isObject(headers)) {
    throw new TypeError('request should be of type "object"')
  }
  const ignoredHeaders = ['authorization', 'content-length', 'content-type', 'user-agent']
  const signable = Object.keys(headers).filter((header) => !ignoredHeaders.includes(header))
  return signable.sort()
}
// Derives the SigV4 signing key by chaining HMAC-SHA256 over the short date,
// region, service name and the literal "aws4_request", seeded with
// "AWS4" + secretKey.
function getSigningKey(date: Date, region: string, secretKey: string, serviceName = 's3') {
  if (!isObject(date)) {
    throw new TypeError('date should be of type "object"')
  }
  if (!isString(region)) {
    throw new TypeError('region should be of type "string"')
  }
  if (!isString(secretKey)) {
    throw new TypeError('secretKey should be of type "string"')
  }
  const dateKey = crypto
    .createHmac('sha256', 'AWS4' + secretKey)
    .update(makeDateShort(date))
    .digest()
  const regionKey = crypto.createHmac('sha256', dateKey).update(region).digest()
  const serviceKey = crypto.createHmac('sha256', regionKey).update(serviceName).digest()
  return crypto.createHmac('sha256', serviceKey).update('aws4_request').digest()
}
// Builds the SigV4 string-to-sign: algorithm, long-form timestamp, credential
// scope and the SHA-256 hex digest of the canonical request, newline-joined.
function getStringToSign(canonicalRequest: ICanonicalRequest, requestDate: Date, region: string, serviceName = 's3') {
  if (!isString(canonicalRequest)) {
    throw new TypeError('canonicalRequest should be of type "string"')
  }
  if (!isObject(requestDate)) {
    throw new TypeError('requestDate should be of type "object"')
  }
  if (!isString(region)) {
    throw new TypeError('region should be of type "string"')
  }
  const requestHash = crypto.createHash('sha256').update(canonicalRequest).digest('hex')
  return [signV4Algorithm, makeDateLong(requestDate), getScope(region, requestDate, serviceName), requestHash].join(
    '\n',
  )
}
// calculate the signature of the POST policy
export function postPresignSignatureV4(region: string, date: Date, secretKey: string, policyBase64: string): string {
if (!isString(region)) {
throw new TypeError('region should be of type "string"')
}
if (!isObject(date)) {
throw new TypeError('date should be of type "object"')
}
if (!isString(secretKey)) {
throw new TypeError('secretKey should be of type "string"')
}
if (!isString(policyBase64)) {
throw new TypeError('policyBase64 should be of type "string"')
}
const signingKey = getSigningKey(date, region, secretKey)
return crypto.createHmac('sha256', signingKey).update(policyBase64).digest('hex').toLowerCase()
}
// Returns the authorization header
export function signV4(
request: IRequest,
accessKey: string,
secretKey: string,
region: string,
requestDate: Date,
sha256sum: string,
serviceName = 's3',
) {
if (!isObject(request)) {
throw new TypeError('request should be of type "object"')
}
if (!isString(accessKey)) {
throw new TypeError('accessKey should be of type "string"')
}
if (!isString(secretKey)) {
throw new TypeError('secretKey should be of type "string"')
}
if (!isString(region)) {
throw new TypeError('region should be of type "string"')
}
if (!accessKey) {
throw new errors.AccessKeyRequiredError('accessKey is required for signing')
}
if (!secretKey) {
throw new errors.SecretKeyRequiredError('secretKey is required for signing')
}
const signedHeaders = getSignedHeaders(request.headers)
const canonicalRequest = getCanonicalRequest(request.method, request.path, request.headers, signedHeaders, sha256sum)
const serviceIdentifier = serviceName || 's3'
const stringToSign = getStringToSign(canonicalRequest, requestDate, region, serviceIdentifier)
const signingKey = getSigningKey(requestDate, region, secretKey, serviceIdentifier)
const credential = getCredential(accessKey, region, requestDate, serviceIdentifier)
const signature = crypto.createHmac('sha256', signingKey).update(stringToSign).digest('hex').toLowerCase()
return `${signV4Algorithm} Credential=${credential}, SignedHeaders=${signedHeaders
.join(';')
.toLowerCase()}, Signature=${signature}`
}
// Thin wrapper over signV4 that makes the target service name explicit
// (e.g. 'sts' for AssumeRole requests); defaults to 's3'.
export function signV4ByServiceName(
  request: IRequest,
  accessKey: string,
  secretKey: string,
  region: string,
  requestDate: Date,
  contentSha256: string,
  serviceName = 's3',
): string {
  return signV4(request, accessKey, secretKey, region, requestDate, contentSha256, serviceName)
}
// returns a presigned URL string (query-string authentication with an
// UNSIGNED-PAYLOAD body hash), valid for `expires` seconds
export function presignSignatureV4(
  request: IRequest,
  accessKey: string,
  secretKey: string,
  sessionToken: string | undefined,
  region: string,
  requestDate: Date,
  expires: number | undefined,
) {
  if (!isObject(request)) {
    throw new TypeError('request should be of type "object"')
  }
  if (!isString(accessKey)) {
    throw new TypeError('accessKey should be of type "string"')
  }
  if (!isString(secretKey)) {
    throw new TypeError('secretKey should be of type "string"')
  }
  if (!isString(region)) {
    throw new TypeError('region should be of type "string"')
  }
  if (!accessKey) {
    throw new errors.AccessKeyRequiredError('accessKey is required for presigning')
  }
  if (!secretKey) {
    throw new errors.SecretKeyRequiredError('secretKey is required for presigning')
  }
  if (expires && !isNumber(expires)) {
    throw new TypeError('expires should be of type "number"')
  }
  if (expires && expires < 1) {
    throw new errors.ExpiresParamError('expires param cannot be less than 1 seconds')
  }
  // NOTE(review): `expires` is in seconds while the constant's name says days;
  // presumably PRESIGN_EXPIRY_DAYS_MAX holds 7 days expressed in seconds —
  // confirm in helpers.ts.
  if (expires && expires > PRESIGN_EXPIRY_DAYS_MAX) {
    throw new errors.ExpiresParamError('expires param cannot be greater than 7 days')
  }
  const iso8601Date = makeDateLong(requestDate)
  const signedHeaders = getSignedHeaders(request.headers)
  const credential = getCredential(accessKey, region, requestDate)
  // Presigned URLs do not sign the body.
  const hashedPayload = 'UNSIGNED-PAYLOAD'
  const requestQuery: string[] = []
  requestQuery.push(`X-Amz-Algorithm=${signV4Algorithm}`)
  requestQuery.push(`X-Amz-Credential=${uriEscape(credential)}`)
  requestQuery.push(`X-Amz-Date=${iso8601Date}`)
  // NOTE(review): if `expires` is undefined this interpolates the literal
  // string "undefined" — callers appear to always pass a value; confirm
  // before relying on the optional type.
  requestQuery.push(`X-Amz-Expires=${expires}`)
  requestQuery.push(`X-Amz-SignedHeaders=${uriEscape(signedHeaders.join(';').toLowerCase())}`)
  if (sessionToken) {
    requestQuery.push(`X-Amz-Security-Token=${uriEscape(sessionToken)}`)
  }
  // Append the auth parameters to any query string already on the path.
  const resource = request.path.split('?')[0]
  let query = request.path.split('?')[1]
  if (query) {
    query = query + '&' + requestQuery.join('&')
  } else {
    query = requestQuery.join('&')
  }
  const path = resource + '?' + query
  const canonicalRequest = getCanonicalRequest(request.method, path, request.headers, signedHeaders, hashedPayload)
  const stringToSign = getStringToSign(canonicalRequest, requestDate, region)
  const signingKey = getSigningKey(requestDate, region, secretKey)
  const signature = crypto.createHmac('sha256', signingKey).update(stringToSign).digest('hex').toLowerCase()
  return request.protocol + '//' + request.headers.host + path + `&X-Amz-Signature=${signature}`
}
+111
View File
@@ -0,0 +1,111 @@
/*
* MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015, 2016 MinIO, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as Crypto from 'node:crypto'
import Through2 from 'through2'
import { isFunction } from './internal/helper.ts'
import * as xmlParsers from './xml-parsers.js'
// getConcater returns a stream that concatenates the input and emits
// the concatenated output when 'end' has reached. If an optional
// parser function is passed upon reaching the 'end' of the stream,
// `parser(concatenated_data)` will be emitted.
export function getConcater(parser, emitError) {
  var objectMode = false
  var bufs = []
  if (parser && !isFunction(parser)) {
    throw new TypeError('parser should be of type "function"')
  }
  if (parser) {
    // a parser produces parsed objects rather than raw buffers
    objectMode = true
  }
  return Through2(
    { objectMode },
    function (chunk, enc, cb) {
      // buffer every chunk until the stream ends
      bufs.push(chunk)
      cb()
    },
    function (cb) {
      if (emitError) {
        // the parsed payload is an error: surface it through the flush callback
        cb(parser(Buffer.concat(bufs).toString()))
        // cb(e) would mean we have to emit 'end' by explicitly calling this.push(null)
        this.push(null)
        return
      }
      if (bufs.length) {
        if (parser) {
          this.push(parser(Buffer.concat(bufs).toString()))
        } else {
          this.push(Buffer.concat(bufs))
        }
      }
      cb()
    },
  )
}
// A through stream that calculates a digest of everything written to it.
// With enableSHA256 it emits { md5sum: '', sha256sum: <hex> }; otherwise it
// emits { md5sum: <base64>, sha256sum: '' }, then ends the stream.
export function getHashSummer(enableSHA256) {
  const hash = enableSHA256 ? Crypto.createHash('sha256') : Crypto.createHash('md5')
  return Through2.obj(
    function (chunk, enc, cb) {
      hash.update(chunk)
      cb()
    },
    function (cb) {
      // Only the selected digest is populated; the other stays empty.
      const hashData = enableSHA256
        ? { md5sum: '', sha256sum: hash.digest('hex') }
        : { md5sum: hash.digest('base64'), sha256sum: '' }
      this.push(hashData)
      this.push(null)
      cb()
    },
  )
}
// Following functions return a stream object that parses XML
// and emits suitable Javascript objects.
// Returns a stream that buffers a ListObjectsV2 XML response body and
// emits the parsed listing object produced by xmlParsers.parseListObjectsV2.
export function getListObjectsV2Transformer() {
  return getConcater(xmlParsers.parseListObjectsV2)
}
// Returns a stream that buffers a ListObjectsV2 (with user metadata) XML
// response body and emits the result of xmlParsers.parseListObjectsV2WithMetadata.
export function getListObjectsV2WithMetadataTransformer() {
  return getConcater(xmlParsers.parseListObjectsV2WithMetadata)
}
// Returns a stream that buffers a GET/SET BucketNotification XML response
// body and emits the result of xmlParsers.parseBucketNotification.
export function getBucketNotificationTransformer() {
  return getConcater(xmlParsers.parseBucketNotification)
}
+167
View File
@@ -0,0 +1,167 @@
/*
* MinIO Javascript Library for Amazon S3 Compatible Cloud Storage, (C) 2015 MinIO, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as errors from './errors.ts'
import { parseXml, sanitizeETag, sanitizeObjectKey, toArray } from './internal/helper.ts'
// Parses the XML body of a bucket-notification response into
// `{ TopicConfiguration, QueueConfiguration, CloudFunctionConfiguration }`,
// each an array of `{ Id, <Target>, Event, Filter }` records.
export function parseBucketNotification(xml) {
  // Collect the event names of one configuration entry.
  const genEvents = (events) => {
    const out = []
    if (events) {
      toArray(events).forEach((s3event) => out.push(s3event))
    }
    return out
  }
  // Collect the S3Key filter rules of one configuration entry.
  const genFilterRules = (filters) => {
    const out = []
    if (filters) {
      filters = toArray(filters)
      if (filters[0].S3Key) {
        filters[0].S3Key = toArray(filters[0].S3Key)
        if (filters[0].S3Key[0].FilterRule) {
          toArray(filters[0].S3Key[0].FilterRule).forEach((rule) => {
            out.push({ Name: toArray(rule.Name)[0], Value: toArray(rule.Value)[0] })
          })
        }
      }
    }
    return out
  }
  const result = {
    TopicConfiguration: [],
    QueueConfiguration: [],
    CloudFunctionConfiguration: [],
  }
  const xmlobj = parseXml(xml).NotificationConfiguration
  // Topic configurations
  if (xmlobj.TopicConfiguration) {
    toArray(xmlobj.TopicConfiguration).forEach((config) => {
      result.TopicConfiguration.push({
        Id: toArray(config.Id)[0],
        Topic: toArray(config.Topic)[0],
        Event: genEvents(config.Event),
        Filter: genFilterRules(config.Filter),
      })
    })
  }
  // Queue configurations
  if (xmlobj.QueueConfiguration) {
    toArray(xmlobj.QueueConfiguration).forEach((config) => {
      result.QueueConfiguration.push({
        Id: toArray(config.Id)[0],
        Queue: toArray(config.Queue)[0],
        Event: genEvents(config.Event),
        Filter: genFilterRules(config.Filter),
      })
    })
  }
  // Cloud-function (lambda) configurations
  if (xmlobj.CloudFunctionConfiguration) {
    toArray(xmlobj.CloudFunctionConfiguration).forEach((config) => {
      result.CloudFunctionConfiguration.push({
        Id: toArray(config.Id)[0],
        CloudFunction: toArray(config.CloudFunction)[0],
        Event: genEvents(config.Event),
        Filter: genFilterRules(config.Filter),
      })
    })
  }
  return result
}
// Parses a ListObjectsV2 XML response body into
// `{ objects, isTruncated, nextContinuationToken? }`, where `objects`
// holds `{ name, lastModified, etag, size }` entries followed by one
// `{ prefix, size: 0 }` entry per common prefix.
export function parseListObjectsV2(xml) {
  const parsed = parseXml(xml)
  if (!parsed.ListBucketResult) {
    throw new errors.InvalidXMLError('Missing tag: "ListBucketResult"')
  }
  const listResult = parsed.ListBucketResult
  const result = {
    objects: [],
    isTruncated: false,
  }
  if (listResult.IsTruncated) {
    result.isTruncated = listResult.IsTruncated
  }
  if (listResult.NextContinuationToken) {
    result.nextContinuationToken = listResult.NextContinuationToken
  }
  if (listResult.Contents) {
    for (const content of toArray(listResult.Contents)) {
      result.objects.push({
        name: sanitizeObjectKey(toArray(content.Key)[0]),
        lastModified: new Date(content.LastModified),
        etag: sanitizeETag(content.ETag),
        size: content.Size,
      })
    }
  }
  if (listResult.CommonPrefixes) {
    for (const commonPrefix of toArray(listResult.CommonPrefixes)) {
      result.objects.push({ prefix: sanitizeObjectKey(toArray(commonPrefix.Prefix)[0]), size: 0 })
    }
  }
  return result
}
// Parses a ListObjectsV2 (with user metadata) XML response body into
// `{ objects, isTruncated, nextContinuationToken? }`; each content entry
// carries `{ name, lastModified, etag, size, metadata }`, with `metadata`
// null when the response has no UserMetadata element.
export function parseListObjectsV2WithMetadata(xml) {
  const parsed = parseXml(xml)
  if (!parsed.ListBucketResult) {
    throw new errors.InvalidXMLError('Missing tag: "ListBucketResult"')
  }
  const listResult = parsed.ListBucketResult
  const result = {
    objects: [],
    isTruncated: false,
  }
  if (listResult.IsTruncated) {
    result.isTruncated = listResult.IsTruncated
  }
  if (listResult.NextContinuationToken) {
    result.nextContinuationToken = listResult.NextContinuationToken
  }
  if (listResult.Contents) {
    for (const content of toArray(listResult.Contents)) {
      let metadata = null
      if (content.UserMetadata != null) {
        metadata = toArray(content.UserMetadata)[0]
      }
      result.objects.push({
        name: sanitizeObjectKey(content.Key),
        lastModified: new Date(content.LastModified),
        etag: sanitizeETag(content.ETag),
        size: content.Size,
        metadata,
      })
    }
  }
  if (listResult.CommonPrefixes) {
    for (const commonPrefix of toArray(listResult.CommonPrefixes)) {
      result.objects.push({ prefix: sanitizeObjectKey(toArray(commonPrefix.Prefix)[0]), size: 0 })
    }
  }
  return result
}