diff --git a/src/http/routes/object/createObject.ts b/src/http/routes/object/createObject.ts
index 78a95998..576eba0f 100644
--- a/src/http/routes/object/createObject.ts
+++ b/src/http/routes/object/createObject.ts
@@ -3,6 +3,7 @@ import { FromSchema } from 'json-schema-to-ts'
 import { createDefaultSchema } from '../../routes-helper'
 import { ROUTE_OPERATIONS } from '../operations'
 import fastifyMultipart from '@fastify/multipart'
+import { fileUploadFromRequest } from '@storage/uploader'

 const createObjectParamsSchema = {
   type: 'object',
@@ -74,14 +75,26 @@ export default async function routes(fastify: FastifyInstance) {
       const isUpsert = request.headers['x-upsert'] === 'true'
       const owner = request.owner

-      const { objectMetadata, path, id } = await request.storage
-        .from(bucketName)
-        .uploadFromRequest(request, {
+      // Get bucket information once for better error context
+      const bucket = await request.storage
+        .asSuperUser()
+        .findBucket(bucketName, 'id, name, file_size_limit, allowed_mime_types')
+
+      const { objectMetadata, path, id } = await request.storage.from(bucketName).uploadNewObject({
+        file: await fileUploadFromRequest(request, {
           objectName,
-          signal: request.signals.body.signal,
-          owner: owner,
-          isUpsert,
-        })
+          fileSizeLimit: bucket.file_size_limit,
+          allowedMimeTypes: bucket.allowed_mime_types || [],
+        }),
+        objectName,
+        signal: request.signals.body.signal,
+        owner: owner,
+        isUpsert,
+        bucketContext: {
+          name: bucket.name,
+          fileSizeLimit: bucket.file_size_limit,
+        },
+      })

       return response.status(objectMetadata?.httpStatusCode ?? 200).send({
         Id: id,
diff --git a/src/http/routes/object/updateObject.ts b/src/http/routes/object/updateObject.ts
index d68d0466..1c87f2bc 100644
--- a/src/http/routes/object/updateObject.ts
+++ b/src/http/routes/object/updateObject.ts
@@ -3,6 +3,7 @@ import { FromSchema } from 'json-schema-to-ts'
 import { createDefaultSchema } from '../../routes-helper'
 import { ROUTE_OPERATIONS } from '../operations'
 import fastifyMultipart from '@fastify/multipart'
+import { fileUploadFromRequest } from '@storage/uploader'

 const updateObjectParamsSchema = {
   type: 'object',
@@ -69,14 +70,26 @@ export default async function routes(fastify: FastifyInstance) {
       const objectName = request.params['*']
       const owner = request.owner as string

-      const { objectMetadata, path, id } = await request.storage
-        .from(bucketName)
-        .uploadFromRequest(request, {
+      // Get bucket information once for better error context
+      const bucket = await request.storage
+        .asSuperUser()
+        .findBucket(bucketName, 'id, name, file_size_limit, allowed_mime_types')
+
+      const { objectMetadata, path, id } = await request.storage.from(bucketName).uploadNewObject({
+        file: await fileUploadFromRequest(request, {
           objectName,
-          signal: request.signals.body.signal,
-          owner: owner,
-          isUpsert: true,
-        })
+          fileSizeLimit: bucket.file_size_limit,
+          allowedMimeTypes: bucket.allowed_mime_types || [],
+        }),
+        objectName,
+        signal: request.signals.body.signal,
+        owner: owner,
+        isUpsert: true,
+        bucketContext: {
+          name: bucket.name,
+          fileSizeLimit: bucket.file_size_limit,
+        },
+      })

       return response.status(objectMetadata?.httpStatusCode ?? 200).send({
         Id: id,
diff --git a/src/http/routes/object/uploadSignedObject.ts b/src/http/routes/object/uploadSignedObject.ts
index 5b01aa46..e9555107 100644
--- a/src/http/routes/object/uploadSignedObject.ts
+++ b/src/http/routes/object/uploadSignedObject.ts
@@ -2,6 +2,7 @@ import { FastifyInstance } from 'fastify'
 import { FromSchema } from 'json-schema-to-ts'
 import { ROUTE_OPERATIONS } from '../operations'
 import fastifyMultipart from '@fastify/multipart'
+import { fileUploadFromRequest } from '@storage/uploader'

 const uploadSignedObjectParamsSchema = {
   type: 'object',
@@ -87,14 +88,28 @@
         .from(bucketName)
         .verifyObjectSignature(token, objectName)

+      // Get bucket information once for better error context
+      const bucket = await request.storage
+        .asSuperUser()
+        .findBucket(bucketName, 'id, name, file_size_limit, allowed_mime_types')
+
       const { objectMetadata, path } = await request.storage
         .asSuperUser()
         .from(bucketName)
-        .uploadFromRequest(request, {
-          owner,
+        .uploadNewObject({
+          file: await fileUploadFromRequest(request, {
+            objectName,
+            fileSizeLimit: bucket.file_size_limit,
+            allowedMimeTypes: bucket.allowed_mime_types || [],
+          }),
           objectName,
+          owner,
           isUpsert: upsert,
           signal: request.signals.body.signal,
+          bucketContext: {
+            name: bucket.name,
+            fileSizeLimit: bucket.file_size_limit,
+          },
         })

       return response.status(objectMetadata?.httpStatusCode ?? 200).send({
diff --git a/src/http/routes/s3/commands/put-object.ts b/src/http/routes/s3/commands/put-object.ts
index 7c0712a4..403c310c 100644
--- a/src/http/routes/s3/commands/put-object.ts
+++ b/src/http/routes/s3/commands/put-object.ts
@@ -127,7 +127,7 @@ export default function PutObject(s3Router: S3Router) {
       const bucket = await ctx.storage
         .asSuperUser()
-        .findBucket(req.Params.Bucket, 'id,file_size_limit,allowed_mime_types')
+        .findBucket(req.Params.Bucket, 'id,name,file_size_limit,allowed_mime_types')

       const uploadRequest = await fileUploadFromRequest(ctx.req, {
         objectName: key,
@@ -137,7 +137,11 @@
       return pipeline(
         uploadRequest.body,
-        new ByteLimitTransformStream(uploadRequest.maxFileSize),
+        new ByteLimitTransformStream(uploadRequest.maxFileSize, {
+          name: bucket.name,
+          fileSizeLimit: bucket.file_size_limit,
+          globalLimit: uploadRequest.globalLimit,
+        }),
         ctx.req.streamingSignatureV4 || new PassThrough(),
         async (fileStream) => {
           return s3Protocol.putObject(
@@ -151,7 +155,15 @@
               ContentEncoding: req.Headers?.['content-encoding'],
               Metadata: metadata,
             },
-            { signal: ctx.signals.body, isTruncated: uploadRequest.isTruncated }
+            {
+              signal: ctx.signals.body,
+              isTruncated: uploadRequest.isTruncated,
+              bucketContext: {
+                name: bucket.name,
+                fileSizeLimit: bucket.file_size_limit,
+                globalLimit: uploadRequest.globalLimit,
+              },
+            }
           )
         }
       )
@@ -176,29 +188,45 @@
       const bucket = await ctx.storage
         .asSuperUser()
-        .findBucket(req.Params.Bucket, 'id,file_size_limit,allowed_mime_types')
+        .findBucket(req.Params.Bucket, 'id,name,file_size_limit,allowed_mime_types')

       const fieldsObject = fieldsToObject(file?.fields || {})
       const metadata = s3Protocol.parseMetadataHeaders(fieldsObject)
       const expiresField = fieldsObject.expires

-      const maxFileSize = await getStandardMaxFileSizeLimit(ctx.tenantId, bucket.file_size_limit)
-
-      return pipeline(file.file, new ByteLimitTransformStream(maxFileSize), async (fileStream) => {
-        return s3Protocol.putObject(
-          {
-            Body: fileStream as stream.Readable,
-            Bucket: req.Params.Bucket,
-            Key: fieldsObject.key as string,
-            CacheControl: fieldsObject['cache-control'] as string,
-            ContentType: fieldsObject['content-type'] as string,
-            Expires: expiresField ? new Date(expiresField) : undefined,
-            ContentEncoding: fieldsObject['content-encoding'] as string,
-            Metadata: metadata,
-          },
-          { signal: ctx.signals.body, isTruncated: () => file.file.truncated }
-        )
-      })
+      const limits = await getStandardMaxFileSizeLimit(ctx.tenantId, bucket.file_size_limit)
+
+      return pipeline(
+        file.file,
+        new ByteLimitTransformStream(limits.maxFileSize, {
+          name: bucket.name,
+          fileSizeLimit: bucket.file_size_limit,
+          globalLimit: limits.globalLimit,
+        }),
+        async (fileStream) => {
+          return s3Protocol.putObject(
+            {
+              Body: fileStream as stream.Readable,
+              Bucket: req.Params.Bucket,
+              Key: fieldsObject.key as string,
+              CacheControl: fieldsObject['cache-control'] as string,
+              ContentType: fieldsObject['content-type'] as string,
+              Expires: expiresField ? new Date(expiresField) : undefined,
+              ContentEncoding: fieldsObject['content-encoding'] as string,
+              Metadata: metadata,
+            },
+            {
+              signal: ctx.signals.body,
+              isTruncated: () => file.file.truncated,
+              bucketContext: {
+                name: bucket.name,
+                fileSizeLimit: bucket.file_size_limit,
+                globalLimit: limits.globalLimit,
+              },
+            }
+          )
+        }
+      )
     }
   )
 }
diff --git a/src/internal/errors/codes.ts b/src/internal/errors/codes.ts
index e1c63dd8..43ca2e32 100644
--- a/src/internal/errors/codes.ts
+++ b/src/internal/errors/codes.ts
@@ -1,5 +1,15 @@
 import { StorageBackendError } from './storage-error'

+function formatBytes(bytes: number): string {
+  if (bytes === 0) return '0 B'
+
+  const k = 1024
+  const sizes = ['B', 'KB', 'MB', 'GB', 'TB']
+  const i = Math.floor(Math.log(bytes) / Math.log(k))
+
+  return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i]
+}
+
 export enum ErrorCode {
   NoSuchBucket = 'NoSuchBucket',
   NoSuchKey = 'NoSuchKey',
@@ -232,7 +242,7 @@
       error: 'invalid_mime_type',
       code: ErrorCode.InvalidMimeType,
       httpStatusCode: 415,
-      message: `mime type ${mimeType} is not supported`,
+      message: `MIME type ${mimeType} is not supported`,
     }),

   InvalidRange: () =>
@@ -243,12 +253,31 @@
       message: `invalid range provided`,
     }),

-  EntityTooLarge: (e?: Error, entity = 'object') =>
+  EntityTooLarge: (
+    e?: Error,
+    entity = 'object',
+    context?: { bucketName?: string; bucketLimit?: number; globalLimit?: number }
+  ) =>
     new StorageBackendError({
       error: 'Payload too large',
       code: ErrorCode.EntityTooLarge,
       httpStatusCode: 413,
-      message: `The ${entity} exceeded the maximum allowed size`,
+      message:
+        context?.bucketName && context?.bucketLimit
+          ? `The ${entity} exceeded the maximum allowed size for bucket "${
+              context.bucketName
+            }" (${formatBytes(context.bucketLimit)}). ${
+              context.globalLimit && context.bucketLimit < context.globalLimit
+                ? `This bucket has a lower limit than your global setting (${formatBytes(
+                    context.globalLimit
+                  )}). You can increase the bucket limit in your Storage settings.`
+                : ''
+            }`
+          : context?.globalLimit
+          ? `The ${entity} exceeded the maximum allowed size in your global settings (${formatBytes(
+              context.globalLimit
+            )})`
+          : `The ${entity} exceeded the maximum allowed size`,
       originalError: e,
     }),
diff --git a/src/storage/object.ts b/src/storage/object.ts
index 6922ecf4..6a0f9e5f 100644
--- a/src/storage/object.ts
+++ b/src/storage/object.ts
@@ -6,7 +6,7 @@ import { getJwtSecret } from '@internal/database'
 import { ObjectMetadata, StorageBackendAdapter } from './backend'
 import { Database, FindObjectFilters, SearchObjectOption } from './database'
 import { mustBeValidKey } from './limits'
-import { fileUploadFromRequest, Uploader, UploadRequest } from './uploader'
+import { Uploader, UploadRequest } from './uploader'
 import { getConfig } from '../config'
 import {
   ObjectAdminDelete,
@@ -16,7 +16,6 @@ import {
   ObjectRemovedMove,
   ObjectUpdatedMetadata,
 } from './events'
-import { FastifyRequest } from 'fastify/types/request'
 import { Obj } from '@storage/schemas'
 import { StorageObjectLocator } from '@storage/locator'

@@ -66,39 +65,18 @@
     return new ObjectStorage(this.backend, this.db.asSuperUser(), this.location, this.bucketId)
   }

-  async uploadFromRequest(
-    request: FastifyRequest,
-    file: {
-      objectName: string
-      owner?: string
-      isUpsert: boolean
-      signal?: AbortSignal
-    }
-  ) {
-    const bucket = await this.db
-      .asSuperUser()
-      .findBucketById(this.bucketId, 'id, file_size_limit, allowed_mime_types')
-
-    const uploadRequest = await fileUploadFromRequest(request, {
-      objectName: file.objectName,
-      fileSizeLimit: bucket.file_size_limit,
-      allowedMimeTypes: bucket.allowed_mime_types || [],
-    })
-
-    return this.uploadNewObject({
-      file: uploadRequest,
-      objectName: file.objectName,
-      owner: file.owner,
-      isUpsert: Boolean(file.isUpsert),
-      signal: file.signal,
-    })
-  }
-
   /**
    * Upload a new object to a storage
    * @param request
    */
-  async uploadNewObject(request: Omit) {
+  async uploadNewObject(
+    request: Omit & {
+      bucketContext?: {
+        name: string
+        fileSizeLimit?: number | null
+      }
+    }
+  ) {
     mustBeValidKey(request.objectName)

     const path = `${this.bucketId}/${request.objectName}`
@@ -107,6 +85,7 @@
       ...request,
       bucketId: this.bucketId,
       uploadType: 'standard',
+      bucketContext: request.bucketContext,
     })

     return { objectMetadata: metadata, path, id: obj.id }
diff --git a/src/storage/protocols/s3/byte-limit-stream.ts b/src/storage/protocols/s3/byte-limit-stream.ts
index fce7c98a..1044a6c6 100644
--- a/src/storage/protocols/s3/byte-limit-stream.ts
+++ b/src/storage/protocols/s3/byte-limit-stream.ts
@@ -4,7 +4,14 @@ import { ERRORS } from '@internal/errors'
 export class ByteLimitTransformStream extends Transform {
   bytesProcessed = 0

-  constructor(private readonly limit: number) {
+  constructor(
+    private readonly limit: number,
+    private readonly bucketContext?: {
+      name: string
+      fileSizeLimit?: number | null
+      globalLimit?: number
+    }
+  ) {
     super()
   }

@@ -12,7 +19,14 @@
     this.bytesProcessed += chunk.length

     if (this.bytesProcessed > this.limit) {
-      callback(ERRORS.EntityTooLarge())
+      const context = this.bucketContext
+        ? {
+            bucketName: this.bucketContext.name,
+            bucketLimit: this.bucketContext.fileSizeLimit || undefined,
+            globalLimit: this.bucketContext.globalLimit,
+          }
+        : undefined
+      callback(ERRORS.EntityTooLarge(undefined, 'object', context))
     } else {
       callback(null, chunk)
     }
diff --git a/src/storage/protocols/s3/s3-handler.ts b/src/storage/protocols/s3/s3-handler.ts
index f9239730..18ff16bc 100644
--- a/src/storage/protocols/s3/s3-handler.ts
+++ b/src/storage/protocols/s3/s3-handler.ts
@@ -23,6 +23,7 @@ import {
 import { PassThrough, Readable } from 'stream'
 import stream from 'stream/promises'
 import { getFileSizeLimit, mustBeValidBucketName, mustBeValidKey } from '../../limits'
+import { getStandardMaxFileSizeLimit } from '../../uploader'
 import { ERRORS } from '@internal/errors'
 import { S3MultipartUpload, Obj } from '../../schemas'
 import { decrypt, encrypt } from '@internal/auth'
@@ -567,8 +568,11 @@
       throw ERRORS.MissingContentLength()
     }

-    const bucket = await this.storage.asSuperUser().findBucket(Bucket, 'file_size_limit')
-    const maxFileSize = await getFileSizeLimit(this.storage.db.tenantId, bucket?.file_size_limit)
+    const bucket = await this.storage.asSuperUser().findBucket(Bucket, 'name, file_size_limit')
+    const limits = await getStandardMaxFileSizeLimit(
+      this.storage.db.tenantId,
+      bucket?.file_size_limit
+    )

     const uploader = new Uploader(this.storage.backend, this.storage.db, this.storage.location)
     await uploader.canUpload({
@@ -578,7 +582,16 @@
       isUpsert: true,
     })

-    const multipart = await this.shouldAllowPartUpload(UploadId, ContentLength, maxFileSize)
+    const multipart = await this.shouldAllowPartUpload(
+      UploadId,
+      ContentLength,
+      limits.maxFileSize,
+      {
+        name: bucket.name,
+        fileSizeLimit: bucket.file_size_limit,
+        globalLimit: limits.globalLimit,
+      }
+    )

     if (signal?.aborted) {
       throw ERRORS.AbortedTerminate('UploadPart aborted')
     }
@@ -603,7 +616,11 @@
     try {
       const uploadPart = await stream.pipeline(
         body,
-        new ByteLimitTransformStream(ContentLength),
+        new ByteLimitTransformStream(ContentLength, {
+          name: bucket.name,
+          fileSizeLimit: bucket.file_size_limit,
+          globalLimit: limits.globalLimit,
+        }),
         async (stream) => {
           return this.storage.backend.uploadPart(
             storageS3Bucket,
@@ -674,7 +691,15 @@
    */
   async putObject(
     command: PutObjectCommandInput,
-    options: { signal?: AbortSignal; isTruncated: () => boolean }
+    options: {
+      signal?: AbortSignal
+      isTruncated: () => boolean
+      bucketContext?: {
+        name: string
+        fileSizeLimit?: number | null
+        globalLimit?: number
+      }
+    }
   ) {
     const uploader = new Uploader(this.storage.backend, this.storage.db, this.storage.location)

@@ -695,6 +720,7 @@
       isUpsert: true,
       uploadType: 's3',
       signal: options.signal,
+      bucketContext: options.bucketContext,
     })

     return {
@@ -1201,16 +1227,25 @@
     const [destinationBucket] = await this.storage.db.asSuperUser().withTransaction(async (db) => {
       return Promise.all([
-        db.findBucketById(Bucket, 'file_size_limit'),
+        db.findBucketById(Bucket, 'name, file_size_limit'),
         db.findBucketById(sourceBucketName, 'id'),
       ])
     })

-    const maxFileSize = await getFileSizeLimit(
+    const limits = await getStandardMaxFileSizeLimit(
       this.storage.db.tenantId,
       destinationBucket?.file_size_limit
     )

-    const multipart = await this.shouldAllowPartUpload(UploadId, Number(copySize), maxFileSize)
+    const multipart = await this.shouldAllowPartUpload(
+      UploadId,
+      Number(copySize),
+      limits.maxFileSize,
+      {
+        name: destinationBucket.name,
+        fileSizeLimit: destinationBucket.file_size_limit,
+        globalLimit: limits.globalLimit,
+      }
+    )

     const uploadPart = await this.storage.backend.uploadPartCopy(
       storageS3Bucket,
@@ -1278,7 +1313,8 @@
   protected async shouldAllowPartUpload(
     uploadId: string,
     contentLength: number,
-    maxFileSize: number
+    maxFileSize: number,
+    bucketContext?: { name: string; fileSizeLimit?: number | null; globalLimit?: number }
   ) {
     return this.storage.db.asSuperUser().withTransaction(async (db) => {
       const multipart = await db.findMultipartUpload(
@@ -1298,7 +1334,14 @@
       const currentProgress = multipart.in_progress_size + contentLength

       if (currentProgress > maxFileSize) {
-        throw ERRORS.EntityTooLarge()
+        const context = bucketContext
+          ? {
+              bucketName: bucketContext.name,
+              bucketLimit: bucketContext.fileSizeLimit || undefined,
+              globalLimit: bucketContext.globalLimit,
+            }
+          : undefined
+        throw ERRORS.EntityTooLarge(undefined, 'object', context)
       }

       const signature = this.uploadSignature({ in_progress_size: currentProgress })
diff --git a/src/storage/storage.ts b/src/storage/storage.ts
index 15aa5c4e..8fcf86dd 100644
--- a/src/storage/storage.ts
+++ b/src/storage/storage.ts
@@ -313,7 +313,10 @@
     const globalMaxLimit = await getFileSizeLimit(this.db.tenantId)

     if (maxFileLimit > globalMaxLimit) {
-      throw ERRORS.EntityTooLarge()
+      throw ERRORS.EntityTooLarge(undefined, 'bucket file size limit', {
+        bucketLimit: maxFileLimit,
+        globalLimit: globalMaxLimit,
+      })
     }

     return maxFileLimit
diff --git a/src/storage/uploader.ts b/src/storage/uploader.ts
index 4bad81cd..f9a25ab9 100644
--- a/src/storage/uploader.ts
+++ b/src/storage/uploader.ts
@@ -21,6 +21,7 @@ interface FileUpload {
   cacheControl: string
   isTruncated: () => boolean
   userMetadata?: Record
+  globalLimit?: number
 }

 export interface UploadRequest {
@@ -31,6 +32,10 @@
   isUpsert?: boolean
   uploadType?: 'standard' | 's3' | 'resumable'
   signal?: AbortSignal
+  bucketContext?: {
+    name: string
+    fileSizeLimit?: number | null
+  }
 }

 const MAX_CUSTOM_METADATA_SIZE = 1024 * 1024
@@ -113,7 +118,18 @@
     )

     if (file.isTruncated()) {
-      throw ERRORS.EntityTooLarge()
+      const context = request.bucketContext
+        ? {
+            bucketName: request.bucketContext.name,
+            bucketLimit: request.bucketContext.fileSizeLimit || undefined,
+            globalLimit: file.globalLimit,
+          }
+        : file.globalLimit
+        ? {
+            globalLimit: file.globalLimit,
+          }
+        : undefined
+      throw ERRORS.EntityTooLarge(undefined, 'object', context)
     }

     return this.completeUpload({
@@ -309,8 +325,11 @@
   let maxFileSize = 0

   // When is an empty folder we restrict it to 0 bytes
+  let globalLimit: number | undefined
   if (!isEmptyFolder(options.objectName)) {
-    maxFileSize = await getStandardMaxFileSizeLimit(request.tenantId, options?.fileSizeLimit)
+    const limits = await getStandardMaxFileSizeLimit(request.tenantId, options?.fileSizeLimit)
+    maxFileSize = limits.maxFileSize
+    globalLimit = limits.globalLimit
   }

   let cacheControl: string
@@ -388,6 +407,7 @@
     isTruncated,
     userMetadata,
     maxFileSize,
+    globalLimit,
   }
 }

@@ -404,8 +424,9 @@ export function parseUserMetadata(metadata: string) {
 export async function getStandardMaxFileSizeLimit(
   tenantId: string,
   bucketSizeLimit?: number | null
-) {
+): Promise<{ maxFileSize: number; globalLimit: number }> {
   let globalFileSizeLimit = await getFileSizeLimit(tenantId)
+  const originalGlobalLimit = globalFileSizeLimit

   if (typeof bucketSizeLimit === 'number') {
     globalFileSizeLimit = Math.min(bucketSizeLimit, globalFileSizeLimit)
@@ -415,5 +436,5 @@
     globalFileSizeLimit = Math.min(uploadFileSizeLimitStandard, globalFileSizeLimit)
   }

-  return globalFileSizeLimit
+  return { maxFileSize: globalFileSizeLimit, globalLimit: originalGlobalLimit }
 }
diff --git a/src/test/object.test.ts b/src/test/object.test.ts
index 71bf5a20..e9bd26d2 100644
--- a/src/test/object.test.ts
+++ b/src/test/object.test.ts
@@ -406,7 +406,9 @@ describe('testing POST object via multipart upload', () => {
     expect(response.statusCode).toBe(400)
     expect(await response.json()).toEqual({
       error: 'Payload too large',
-      message: 'The object exceeded the maximum allowed size',
+      message: expect.stringMatching(
+        /The object exceeded the maximum allowed size for bucket "public-limit-max-size"/
+      ),
       statusCode: '413',
     })
     expect(S3Backend.prototype.uploadObject).toHaveBeenCalled()
@@ -569,7 +571,7 @@ describe('testing POST object via multipart upload', () => {
     expect(response.statusCode).toBe(400)
     expect(await response.json()).toEqual({
       error: 'invalid_mime_type',
-      message: `mime type image/png is not supported`,
+      message: `MIME type image/png is not supported`,
       statusCode: '415',
     })
     expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
@@ -630,7 +632,7 @@ describe('testing POST object via multipart upload', () => {
     expect(response.statusCode).toBe(400)
     expect(await response.json()).toEqual({
       error: 'invalid_mime_type',
-      message: `mime type thisisnotarealmimetype is not supported`,
+      message: `MIME type thisisnotarealmimetype is not supported`,
       statusCode: '415',
     })
     expect(S3Backend.prototype.uploadObject).not.toHaveBeenCalled()
@@ -676,7 +678,7 @@ describe('testing POST object via multipart upload', () => {
       JSON.stringify({
         statusCode: '413',
         error: 'Payload too large',
-        message: 'The object exceeded the maximum allowed size',
+        message: 'The object exceeded the maximum allowed size in your global settings (1 B)',
       })
     )
   })
@@ -913,7 +915,7 @@ describe('testing POST object via binary upload', () => {
       JSON.stringify({
         statusCode: '413',
         error: 'Payload too large',
-        message: 'The object exceeded the maximum allowed size',
+        message: 'The object exceeded the maximum allowed size in your global settings (1 B)',
       })
     )
   })
diff --git a/src/test/s3-protocol.test.ts b/src/test/s3-protocol.test.ts
index 57605ed3..827db603 100644
--- a/src/test/s3-protocol.test.ts
+++ b/src/test/s3-protocol.test.ts
@@ -673,7 +673,7 @@ describe('S3 Protocol', () => {
         expect((e as Error).message).not.toEqual('Should not reach here')
         expect((e as S3ServiceException).$metadata.httpStatusCode).toEqual(413)
         expect((e as S3ServiceException).message).toEqual(
-          'The object exceeded the maximum allowed size'
+          'The object exceeded the maximum allowed size in your global settings (10 KB)'
         )
         expect((e as S3ServiceException).name).toEqual('EntityTooLarge')
       }
@@ -705,7 +705,7 @@ describe('S3 Protocol', () => {
         expect((e as Error).message).not.toEqual('Should not reach here')
         expect((e as S3ServiceException).$metadata.httpStatusCode).toEqual(413)
         expect((e as S3ServiceException).message).toEqual(
-          'The object exceeded the maximum allowed size'
+          'The object exceeded the maximum allowed size in your global settings (10 KB)'
         )
         expect((e as S3ServiceException).name).toEqual('EntityTooLarge')
       }
@@ -751,7 +751,7 @@ describe('S3 Protocol', () => {
         expect((e as Error).message).not.toEqual('Should not reach here')
         expect((e as S3ServiceException).$metadata.httpStatusCode).toEqual(413)
         expect((e as S3ServiceException).message).toEqual(
-          'The object exceeded the maximum allowed size'
+          'The object exceeded the maximum allowed size in your global settings (10 KB)'
         )
         expect((e as S3ServiceException).name).toEqual('EntityTooLarge')
       }