From 5073edcac5db62c24f6ee6ef068fb99deb7692f1 Mon Sep 17 00:00:00 2001 From: bas-d <7903735+bas-d@users.noreply.github.com> Date: Thu, 2 Dec 2021 18:23:59 -0600 Subject: [PATCH 01/17] Remove monorepo stuff --- .../.npmignore => .npmignore | 0 .travis.yml | 17 - README.md | 318 +- lerna.json | 5 - package.json | 54 +- packages/dynamodb-auto-marshaller/LICENSE | 201 - packages/dynamodb-auto-marshaller/README.md | 152 - .../dynamodb-auto-marshaller/package.json | 44 - .../src/BinarySet.spec.ts | 183 - .../dynamodb-auto-marshaller/src/BinarySet.ts | 68 - .../src/Marshaller.spec.ts | 771 --- .../src/Marshaller.ts | 451 -- .../src/NumberValue.spec.ts | 75 - .../src/NumberValue.ts | 49 - .../src/NumberValueSet.spec.ts | 184 - .../src/NumberValueSet.ts | 46 - .../dynamodb-auto-marshaller/src/ObjectSet.ts | 119 - .../dynamodb-auto-marshaller/src/index.ts | 4 - .../src/isArrayBuffer.spec.ts | 37 - .../src/isArrayBuffer.ts | 8 - .../dynamodb-auto-marshaller/tsconfig.json | 26 - .../tsconfig.test.json | 9 - packages/dynamodb-batch-iterator/.npmignore | 10 - packages/dynamodb-batch-iterator/CHANGELOG.md | 20 - packages/dynamodb-batch-iterator/LICENSE | 201 - packages/dynamodb-batch-iterator/README.md | 90 - packages/dynamodb-batch-iterator/package.json | 45 - .../src/BatchOperation.ts | 244 - packages/dynamodb-batch-iterator/src/index.ts | 4 - packages/dynamodb-batch-iterator/src/types.ts | 63 - .../dynamodb-batch-iterator/tsconfig.json | 29 - .../.npmignore | 10 - .../dynamodb-data-mapper-annotations/LICENSE | 201 - .../README.md | 176 - .../package.json | 49 - .../src/annotationShapes.ts | 9 - .../src/attribute.spec.ts | 464 -- .../src/attribute.ts | 209 - .../src/autoGeneratedHashKey.spec.ts | 35 - .../src/autoGeneratedHashKey.ts | 14 - .../src/constants.ts | 1 - .../src/exampleSchema.fixture.ts | 108 - .../src/functional.spec.ts | 115 - .../src/hashKey.spec.ts | 27 - .../src/hashKey.ts | 18 - .../src/index.ts | 6 - .../src/rangeKey.spec.ts | 27 - .../src/rangeKey.ts | 18 - .../src/table.spec.ts | 16 - .../src/table.ts | 17 - .../src/versionAttribute.spec.ts | 33 - .../src/versionAttribute.ts | 13 - .../tsconfig.json | 31 - .../tsconfig.test.json | 10 - packages/dynamodb-data-mapper/.gitignore | 1 - packages/dynamodb-data-mapper/.npmignore | 10 - packages/dynamodb-data-mapper/CHANGELOG.md | 131 - packages/dynamodb-data-mapper/LICENSE | 201 - packages/dynamodb-data-mapper/README.md | 705 --- .../dynamodb-data-mapper/jest.integration.js | 6 - packages/dynamodb-data-mapper/package.json | 51 - .../dynamodb-data-mapper/src/BatchState.ts | 13 - .../src/DataMapper.integ.ts | 226 - .../src/DataMapper.spec.ts | 4529 ----------------- .../dynamodb-data-mapper/src/DataMapper.ts | 1385 ----- .../src/ItemNotFoundException.spec.ts | 32 - .../src/ItemNotFoundException.ts | 23 - packages/dynamodb-data-mapper/src/Iterator.ts | 112 - .../dynamodb-data-mapper/src/Paginator.ts | 106 - .../src/ParallelScanIterator.ts | 27 - .../src/ParallelScanPaginator.ts | 88 - .../dynamodb-data-mapper/src/QueryIterator.ts | 27 - .../src/QueryPaginator.ts | 130 - .../dynamodb-data-mapper/src/ScanIterator.ts | 19 - .../dynamodb-data-mapper/src/ScanPaginator.ts | 23 - .../src/asyncIteratorSymbolPolyfill.ts | 8 - .../src/buildScanInput.ts | 76 - .../dynamodb-data-mapper/src/constants.ts | 15 - .../dynamodb-data-mapper/src/embed.spec.ts | 35 - packages/dynamodb-data-mapper/src/embed.ts | 23 - packages/dynamodb-data-mapper/src/index.ts | 12 - .../src/marshallStartKey.ts | 24 - .../src/namedParameters/BatchGetOptions.ts | 28 - 
.../src/namedParameters/CreateTableOptions.ts | 40 - .../DataMapperConfiguration.ts | 27 - .../src/namedParameters/DeleteOptions.ts | 34 - .../ExecuteUpdateExpressionOptions.ts | 9 - .../src/namedParameters/GetOptions.ts | 22 - .../namedParameters/ProvisionedThroughput.ts | 4 - .../src/namedParameters/PutOptions.ts | 29 - .../src/namedParameters/QueryOptions.ts | 81 - .../ReadConsistencyConfiguration.ts | 8 - .../src/namedParameters/ScanOptions.ts | 169 - .../namedParameters/SecondaryIndexOptions.ts | 27 - .../src/namedParameters/UpdateOptions.ts | 32 - .../src/namedParameters/index.ts | 12 - .../src/protocols.spec.ts | 27 - .../dynamodb-data-mapper/src/protocols.ts | 111 - packages/dynamodb-data-mapper/tsconfig.json | 29 - .../dynamodb-data-mapper/tsconfig.test.json | 10 - packages/dynamodb-data-marshaller/.npmignore | 10 - packages/dynamodb-data-marshaller/LICENSE | 201 - packages/dynamodb-data-marshaller/README.md | 419 -- .../dynamodb-data-marshaller/package.json | 47 - .../src/InvalidSchemaError.ts | 11 - .../src/InvalidValueError.ts | 12 - .../dynamodb-data-marshaller/src/KeySchema.ts | 23 - .../src/Schema.spec.ts | 49 - .../dynamodb-data-marshaller/src/Schema.ts | 26 - .../src/SchemaType.spec.ts | 283 - .../src/SchemaType.ts | 406 -- .../dynamodb-data-marshaller/src/index.ts | 12 - .../src/isKey.spec.ts | 94 - .../dynamodb-data-marshaller/src/isKey.ts | 19 - .../src/keysFromSchema.spec.ts | 95 - .../src/keysFromSchema.ts | 80 - .../src/marshallExpression.spec.ts | 259 - .../src/marshallExpression.ts | 293 -- .../src/marshallItem.spec.ts | 623 --- .../src/marshallItem.ts | 322 -- .../src/marshallKey.spec.ts | 52 - .../src/marshallKey.ts | 25 - .../src/toSchemaName.spec.ts | 93 - .../src/toSchemaName.ts | 49 - .../src/unmarshallItem.spec.ts | 563 -- .../src/unmarshallItem.ts | 179 - .../dynamodb-data-marshaller/tsconfig.json | 28 - .../tsconfig.test.json | 10 - packages/dynamodb-expressions/.npmignore | 10 - packages/dynamodb-expressions/LICENSE | 201 - packages/dynamodb-expressions/README.md | 501 -- packages/dynamodb-expressions/package.json | 45 - .../src/AttributeBearingExpression.ts | 11 - .../src/AttributePath.spec.ts | 116 - .../dynamodb-expressions/src/AttributePath.ts | 154 - .../src/AttributeValue.spec.ts | 36 - .../src/AttributeValue.ts | 22 - .../src/ConditionExpression.spec.ts | 648 --- .../src/ConditionExpression.ts | 499 -- .../src/ExpressionAttributes.spec.ts | 686 --- .../src/ExpressionAttributes.ts | 69 - .../src/FunctionExpression.spec.ts | 78 - .../src/FunctionExpression.ts | 43 - .../src/MathematicalExpression.spec.ts | 86 - .../src/MathematicalExpression.ts | 44 - .../src/ProjectionExpression.spec.ts | 66 - .../src/ProjectionExpression.ts | 25 - .../src/UpdateExpression.spec.ts | 163 - .../src/UpdateExpression.ts | 99 - packages/dynamodb-expressions/src/index.ts | 8 - packages/dynamodb-expressions/tsconfig.json | 27 - .../dynamodb-expressions/tsconfig.test.json | 10 - packages/dynamodb-query-iterator/.npmignore | 10 - packages/dynamodb-query-iterator/CHANGELOG.md | 14 - packages/dynamodb-query-iterator/LICENSE | 201 - packages/dynamodb-query-iterator/README.md | 357 -- packages/dynamodb-query-iterator/package.json | 44 - .../src/DynamoDbPaginator.ts | 114 - .../src/DynamoDbPaginatorInterface.ts | 32 - .../src/DynamoDbResultsPage.ts | 51 - .../src/ItemIterator.ts | 117 - .../src/ParallelScanInput.ts | 25 - .../src/ParallelScanIterator.spec.ts | 186 - .../src/ParallelScanIterator.ts | 17 - .../src/ParallelScanPaginator.spec.ts | 390 -- 
.../src/ParallelScanPaginator.ts | 237 - .../src/QueryIterator.spec.ts | 185 - .../src/QueryIterator.ts | 10 - .../src/QueryPaginator.spec.ts | 350 -- .../src/QueryPaginator.ts | 46 - .../src/ScanIterator.spec.ts | 146 - .../src/ScanIterator.ts | 10 - .../src/ScanPaginator.spec.ts | 278 - .../src/ScanPaginator.ts | 49 - packages/dynamodb-query-iterator/src/index.ts | 8 - .../src/mergeConsumedCapacities.spec.ts | 151 - .../src/mergeConsumedCapacities.ts | 71 - .../dynamodb-query-iterator/tsconfig.json | 29 - .../tsconfig.test.json | 10 - .../src => src}/BatchGet.spec.ts | 0 .../src => src}/BatchGet.ts | 0 .../src => src}/BatchGetOptions.ts | 0 src/BatchOperation.ts | 243 + .../src => src}/BatchWrite.spec.ts | 0 .../src => src}/BatchWrite.ts | 0 src/index.ts | 4 + .../src => src}/itemIdentifier.spec.ts | 0 .../src => src}/itemIdentifier.ts | 0 src/types.ts | 68 + tsconfig.json | 20 +- .../tsconfig.test.json => tsconfig.test.json | 0 191 files changed, 429 insertions(+), 24845 deletions(-) rename packages/dynamodb-auto-marshaller/.npmignore => .npmignore (100%) delete mode 100644 .travis.yml delete mode 100644 lerna.json delete mode 100644 packages/dynamodb-auto-marshaller/LICENSE delete mode 100644 packages/dynamodb-auto-marshaller/README.md delete mode 100644 packages/dynamodb-auto-marshaller/package.json delete mode 100644 packages/dynamodb-auto-marshaller/src/BinarySet.spec.ts delete mode 100644 packages/dynamodb-auto-marshaller/src/BinarySet.ts delete mode 100644 packages/dynamodb-auto-marshaller/src/Marshaller.spec.ts delete mode 100644 packages/dynamodb-auto-marshaller/src/Marshaller.ts delete mode 100644 packages/dynamodb-auto-marshaller/src/NumberValue.spec.ts delete mode 100644 packages/dynamodb-auto-marshaller/src/NumberValue.ts delete mode 100644 packages/dynamodb-auto-marshaller/src/NumberValueSet.spec.ts delete mode 100644 packages/dynamodb-auto-marshaller/src/NumberValueSet.ts delete mode 100644 packages/dynamodb-auto-marshaller/src/ObjectSet.ts delete mode 100644 packages/dynamodb-auto-marshaller/src/index.ts delete mode 100644 packages/dynamodb-auto-marshaller/src/isArrayBuffer.spec.ts delete mode 100644 packages/dynamodb-auto-marshaller/src/isArrayBuffer.ts delete mode 100644 packages/dynamodb-auto-marshaller/tsconfig.json delete mode 100644 packages/dynamodb-auto-marshaller/tsconfig.test.json delete mode 100644 packages/dynamodb-batch-iterator/.npmignore delete mode 100644 packages/dynamodb-batch-iterator/CHANGELOG.md delete mode 100644 packages/dynamodb-batch-iterator/LICENSE delete mode 100644 packages/dynamodb-batch-iterator/README.md delete mode 100644 packages/dynamodb-batch-iterator/package.json delete mode 100644 packages/dynamodb-batch-iterator/src/BatchOperation.ts delete mode 100644 packages/dynamodb-batch-iterator/src/index.ts delete mode 100644 packages/dynamodb-batch-iterator/src/types.ts delete mode 100644 packages/dynamodb-batch-iterator/tsconfig.json delete mode 100644 packages/dynamodb-data-mapper-annotations/.npmignore delete mode 100644 packages/dynamodb-data-mapper-annotations/LICENSE delete mode 100644 packages/dynamodb-data-mapper-annotations/README.md delete mode 100644 packages/dynamodb-data-mapper-annotations/package.json delete mode 100644 packages/dynamodb-data-mapper-annotations/src/annotationShapes.ts delete mode 100644 packages/dynamodb-data-mapper-annotations/src/attribute.spec.ts delete mode 100644 packages/dynamodb-data-mapper-annotations/src/attribute.ts delete mode 100644 
packages/dynamodb-data-mapper-annotations/src/autoGeneratedHashKey.spec.ts delete mode 100644 packages/dynamodb-data-mapper-annotations/src/autoGeneratedHashKey.ts delete mode 100644 packages/dynamodb-data-mapper-annotations/src/constants.ts delete mode 100644 packages/dynamodb-data-mapper-annotations/src/exampleSchema.fixture.ts delete mode 100644 packages/dynamodb-data-mapper-annotations/src/functional.spec.ts delete mode 100644 packages/dynamodb-data-mapper-annotations/src/hashKey.spec.ts delete mode 100644 packages/dynamodb-data-mapper-annotations/src/hashKey.ts delete mode 100644 packages/dynamodb-data-mapper-annotations/src/index.ts delete mode 100644 packages/dynamodb-data-mapper-annotations/src/rangeKey.spec.ts delete mode 100644 packages/dynamodb-data-mapper-annotations/src/rangeKey.ts delete mode 100644 packages/dynamodb-data-mapper-annotations/src/table.spec.ts delete mode 100644 packages/dynamodb-data-mapper-annotations/src/table.ts delete mode 100644 packages/dynamodb-data-mapper-annotations/src/versionAttribute.spec.ts delete mode 100644 packages/dynamodb-data-mapper-annotations/src/versionAttribute.ts delete mode 100644 packages/dynamodb-data-mapper-annotations/tsconfig.json delete mode 100644 packages/dynamodb-data-mapper-annotations/tsconfig.test.json delete mode 100644 packages/dynamodb-data-mapper/.gitignore delete mode 100644 packages/dynamodb-data-mapper/.npmignore delete mode 100644 packages/dynamodb-data-mapper/CHANGELOG.md delete mode 100644 packages/dynamodb-data-mapper/LICENSE delete mode 100644 packages/dynamodb-data-mapper/README.md delete mode 100644 packages/dynamodb-data-mapper/jest.integration.js delete mode 100644 packages/dynamodb-data-mapper/package.json delete mode 100644 packages/dynamodb-data-mapper/src/BatchState.ts delete mode 100644 packages/dynamodb-data-mapper/src/DataMapper.integ.ts delete mode 100644 packages/dynamodb-data-mapper/src/DataMapper.spec.ts delete mode 100644 packages/dynamodb-data-mapper/src/DataMapper.ts delete mode 100644 packages/dynamodb-data-mapper/src/ItemNotFoundException.spec.ts delete mode 100644 packages/dynamodb-data-mapper/src/ItemNotFoundException.ts delete mode 100644 packages/dynamodb-data-mapper/src/Iterator.ts delete mode 100644 packages/dynamodb-data-mapper/src/Paginator.ts delete mode 100644 packages/dynamodb-data-mapper/src/ParallelScanIterator.ts delete mode 100644 packages/dynamodb-data-mapper/src/ParallelScanPaginator.ts delete mode 100644 packages/dynamodb-data-mapper/src/QueryIterator.ts delete mode 100644 packages/dynamodb-data-mapper/src/QueryPaginator.ts delete mode 100644 packages/dynamodb-data-mapper/src/ScanIterator.ts delete mode 100644 packages/dynamodb-data-mapper/src/ScanPaginator.ts delete mode 100644 packages/dynamodb-data-mapper/src/asyncIteratorSymbolPolyfill.ts delete mode 100644 packages/dynamodb-data-mapper/src/buildScanInput.ts delete mode 100644 packages/dynamodb-data-mapper/src/constants.ts delete mode 100644 packages/dynamodb-data-mapper/src/embed.spec.ts delete mode 100644 packages/dynamodb-data-mapper/src/embed.ts delete mode 100644 packages/dynamodb-data-mapper/src/index.ts delete mode 100644 packages/dynamodb-data-mapper/src/marshallStartKey.ts delete mode 100644 packages/dynamodb-data-mapper/src/namedParameters/BatchGetOptions.ts delete mode 100644 packages/dynamodb-data-mapper/src/namedParameters/CreateTableOptions.ts delete mode 100644 packages/dynamodb-data-mapper/src/namedParameters/DataMapperConfiguration.ts delete mode 100644 
packages/dynamodb-data-mapper/src/namedParameters/DeleteOptions.ts delete mode 100644 packages/dynamodb-data-mapper/src/namedParameters/ExecuteUpdateExpressionOptions.ts delete mode 100644 packages/dynamodb-data-mapper/src/namedParameters/GetOptions.ts delete mode 100644 packages/dynamodb-data-mapper/src/namedParameters/ProvisionedThroughput.ts delete mode 100644 packages/dynamodb-data-mapper/src/namedParameters/PutOptions.ts delete mode 100644 packages/dynamodb-data-mapper/src/namedParameters/QueryOptions.ts delete mode 100644 packages/dynamodb-data-mapper/src/namedParameters/ReadConsistencyConfiguration.ts delete mode 100644 packages/dynamodb-data-mapper/src/namedParameters/ScanOptions.ts delete mode 100644 packages/dynamodb-data-mapper/src/namedParameters/SecondaryIndexOptions.ts delete mode 100644 packages/dynamodb-data-mapper/src/namedParameters/UpdateOptions.ts delete mode 100644 packages/dynamodb-data-mapper/src/namedParameters/index.ts delete mode 100644 packages/dynamodb-data-mapper/src/protocols.spec.ts delete mode 100644 packages/dynamodb-data-mapper/src/protocols.ts delete mode 100644 packages/dynamodb-data-mapper/tsconfig.json delete mode 100644 packages/dynamodb-data-mapper/tsconfig.test.json delete mode 100644 packages/dynamodb-data-marshaller/.npmignore delete mode 100644 packages/dynamodb-data-marshaller/LICENSE delete mode 100644 packages/dynamodb-data-marshaller/README.md delete mode 100644 packages/dynamodb-data-marshaller/package.json delete mode 100644 packages/dynamodb-data-marshaller/src/InvalidSchemaError.ts delete mode 100644 packages/dynamodb-data-marshaller/src/InvalidValueError.ts delete mode 100644 packages/dynamodb-data-marshaller/src/KeySchema.ts delete mode 100644 packages/dynamodb-data-marshaller/src/Schema.spec.ts delete mode 100644 packages/dynamodb-data-marshaller/src/Schema.ts delete mode 100644 packages/dynamodb-data-marshaller/src/SchemaType.spec.ts delete mode 100644 packages/dynamodb-data-marshaller/src/SchemaType.ts delete mode 100644 packages/dynamodb-data-marshaller/src/index.ts delete mode 100644 packages/dynamodb-data-marshaller/src/isKey.spec.ts delete mode 100644 packages/dynamodb-data-marshaller/src/isKey.ts delete mode 100644 packages/dynamodb-data-marshaller/src/keysFromSchema.spec.ts delete mode 100644 packages/dynamodb-data-marshaller/src/keysFromSchema.ts delete mode 100644 packages/dynamodb-data-marshaller/src/marshallExpression.spec.ts delete mode 100644 packages/dynamodb-data-marshaller/src/marshallExpression.ts delete mode 100644 packages/dynamodb-data-marshaller/src/marshallItem.spec.ts delete mode 100644 packages/dynamodb-data-marshaller/src/marshallItem.ts delete mode 100644 packages/dynamodb-data-marshaller/src/marshallKey.spec.ts delete mode 100644 packages/dynamodb-data-marshaller/src/marshallKey.ts delete mode 100644 packages/dynamodb-data-marshaller/src/toSchemaName.spec.ts delete mode 100644 packages/dynamodb-data-marshaller/src/toSchemaName.ts delete mode 100644 packages/dynamodb-data-marshaller/src/unmarshallItem.spec.ts delete mode 100644 packages/dynamodb-data-marshaller/src/unmarshallItem.ts delete mode 100644 packages/dynamodb-data-marshaller/tsconfig.json delete mode 100644 packages/dynamodb-data-marshaller/tsconfig.test.json delete mode 100644 packages/dynamodb-expressions/.npmignore delete mode 100644 packages/dynamodb-expressions/LICENSE delete mode 100644 packages/dynamodb-expressions/README.md delete mode 100644 packages/dynamodb-expressions/package.json delete mode 100644 
packages/dynamodb-expressions/src/AttributeBearingExpression.ts delete mode 100644 packages/dynamodb-expressions/src/AttributePath.spec.ts delete mode 100644 packages/dynamodb-expressions/src/AttributePath.ts delete mode 100644 packages/dynamodb-expressions/src/AttributeValue.spec.ts delete mode 100644 packages/dynamodb-expressions/src/AttributeValue.ts delete mode 100644 packages/dynamodb-expressions/src/ConditionExpression.spec.ts delete mode 100644 packages/dynamodb-expressions/src/ConditionExpression.ts delete mode 100644 packages/dynamodb-expressions/src/ExpressionAttributes.spec.ts delete mode 100644 packages/dynamodb-expressions/src/ExpressionAttributes.ts delete mode 100644 packages/dynamodb-expressions/src/FunctionExpression.spec.ts delete mode 100644 packages/dynamodb-expressions/src/FunctionExpression.ts delete mode 100644 packages/dynamodb-expressions/src/MathematicalExpression.spec.ts delete mode 100644 packages/dynamodb-expressions/src/MathematicalExpression.ts delete mode 100644 packages/dynamodb-expressions/src/ProjectionExpression.spec.ts delete mode 100644 packages/dynamodb-expressions/src/ProjectionExpression.ts delete mode 100644 packages/dynamodb-expressions/src/UpdateExpression.spec.ts delete mode 100644 packages/dynamodb-expressions/src/UpdateExpression.ts delete mode 100644 packages/dynamodb-expressions/src/index.ts delete mode 100644 packages/dynamodb-expressions/tsconfig.json delete mode 100644 packages/dynamodb-expressions/tsconfig.test.json delete mode 100644 packages/dynamodb-query-iterator/.npmignore delete mode 100644 packages/dynamodb-query-iterator/CHANGELOG.md delete mode 100644 packages/dynamodb-query-iterator/LICENSE delete mode 100644 packages/dynamodb-query-iterator/README.md delete mode 100644 packages/dynamodb-query-iterator/package.json delete mode 100644 packages/dynamodb-query-iterator/src/DynamoDbPaginator.ts delete mode 100644 packages/dynamodb-query-iterator/src/DynamoDbPaginatorInterface.ts delete mode 100644 packages/dynamodb-query-iterator/src/DynamoDbResultsPage.ts delete mode 100644 packages/dynamodb-query-iterator/src/ItemIterator.ts delete mode 100644 packages/dynamodb-query-iterator/src/ParallelScanInput.ts delete mode 100644 packages/dynamodb-query-iterator/src/ParallelScanIterator.spec.ts delete mode 100644 packages/dynamodb-query-iterator/src/ParallelScanIterator.ts delete mode 100644 packages/dynamodb-query-iterator/src/ParallelScanPaginator.spec.ts delete mode 100644 packages/dynamodb-query-iterator/src/ParallelScanPaginator.ts delete mode 100644 packages/dynamodb-query-iterator/src/QueryIterator.spec.ts delete mode 100644 packages/dynamodb-query-iterator/src/QueryIterator.ts delete mode 100644 packages/dynamodb-query-iterator/src/QueryPaginator.spec.ts delete mode 100644 packages/dynamodb-query-iterator/src/QueryPaginator.ts delete mode 100644 packages/dynamodb-query-iterator/src/ScanIterator.spec.ts delete mode 100644 packages/dynamodb-query-iterator/src/ScanIterator.ts delete mode 100644 packages/dynamodb-query-iterator/src/ScanPaginator.spec.ts delete mode 100644 packages/dynamodb-query-iterator/src/ScanPaginator.ts delete mode 100644 packages/dynamodb-query-iterator/src/index.ts delete mode 100644 packages/dynamodb-query-iterator/src/mergeConsumedCapacities.spec.ts delete mode 100644 packages/dynamodb-query-iterator/src/mergeConsumedCapacities.ts delete mode 100644 packages/dynamodb-query-iterator/tsconfig.json delete mode 100644 packages/dynamodb-query-iterator/tsconfig.test.json rename {packages/dynamodb-batch-iterator/src 
=> src}/BatchGet.spec.ts (100%) rename {packages/dynamodb-batch-iterator/src => src}/BatchGet.ts (100%) rename {packages/dynamodb-batch-iterator/src => src}/BatchGetOptions.ts (100%) create mode 100644 src/BatchOperation.ts rename {packages/dynamodb-batch-iterator/src => src}/BatchWrite.spec.ts (100%) rename {packages/dynamodb-batch-iterator/src => src}/BatchWrite.ts (100%) rename {packages/dynamodb-batch-iterator/src => src}/itemIdentifier.spec.ts (100%) rename {packages/dynamodb-batch-iterator/src => src}/itemIdentifier.ts (100%) create mode 100644 src/types.ts rename packages/dynamodb-batch-iterator/tsconfig.test.json => tsconfig.test.json (100%) diff --git a/packages/dynamodb-auto-marshaller/.npmignore b/.npmignore similarity index 100% rename from packages/dynamodb-auto-marshaller/.npmignore rename to .npmignore diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 1f0834e4..00000000 --- a/.travis.yml +++ /dev/null @@ -1,17 +0,0 @@ -language: node_js -node_js: - - "node" - - "lts/*" - - "11" - - "10" - - "9" - - "8" - - "6" - -sudo: false - -before_script: - - npm run bootstrap - -script: - - npm test -- --runInBand diff --git a/README.md b/README.md index 7776af8f..2af8c550 100644 --- a/README.md +++ b/README.md @@ -1,284 +1,90 @@ -# Amazon DynamoDB DataMapper For JavaScript +# Amazon DynamoDB Batch Iteration [![Apache 2 License](https://img.shields.io/github/license/awslabs/dynamodb-data-mapper-js.svg?style=flat)](http://aws.amazon.com/apache-2-0/) -This repository hosts several packages that collectively make up an object to -document mapper for JavaScript applications using Amazon DynamoDB. +This library provides utilities for automatically submitting arbitrarily-sized +batches of reads and writes to DynamoDB using well-formed `BatchGetItem` and +`BatchWriteItem` operations, respectively. Partial successes (i.e., +`BatchGetItem` operations that return some responses and some unprocessed keys +or `BatchWriteItem` operations that return some unprocessed items) will retry +the unprocessed items automatically using exponential backoff. ## Getting started -[The `@aws/dynamodb-data-mapper` package](packages/dynamodb-data-mapper) provides -a simple way to persist and load an application's domain objects to and from -Amazon DynamoDB. When used together with the decorators provided by [the -`@aws/dynamodb-data-mapper-annotations` package](packages/dynamodb-data-mapper-annotations), -you can describe the relationship between a class and its representation in -DynamoDB by adding a few decorators: +### Reading batches of items -```typescript -import { - attribute, - hashKey, - rangeKey, - table, -} from '@aws/dynamodb-data-mapper-annotations'; - -@table('table_name') -class MyDomainObject { - @hashKey() - id: string; - - @rangeKey({defaultProvider: () => new Date()}) - createdAt: Date; - - @attribute() - completed?: boolean; -} -``` - -With domain classes defined, you can interact with records in DynamoDB via an -instance of `DataMapper`: +Create a `BatchGet` object, supplying an instantiated DynamoDB client from the +AWS SDK for JavaScript and an iterable of keys that you wish to retrieve. The +iterable may be synchronous (such as an array) or asynchronous (such as an +object stream wrapped with [async-iter-stream](https://github.com/calvinmetcalf/async-iter-stream)'s +`wrap` method). 
```typescript -import {DataMapper} from '@aws/dynamodb-data-mapper'; +import { BatchGet } from '@aws/dynamodb-batch-iterator'; import DynamoDB = require('aws-sdk/clients/dynamodb'); -const mapper = new DataMapper({ - client: new DynamoDB({region: 'us-west-2'}), // the SDK client used to execute operations - tableNamePrefix: 'dev_' // optionally, you can provide a table prefix to keep your dev and prod tables separate -}); -``` - -### Supported operations - -Using the `mapper` object and `MyDomainObject` class defined above, you can -perform the following operations: - -#### `put` - -Creates (or overwrites) an item in the table - -```typescript -const toSave = Object.assign(new MyDomainObject, {id: 'foo'}); -mapper.put(toSave).then(objectSaved => { - // the record has been saved -}); -``` - -#### `get` - -Retrieves an item from DynamoDB - -```typescript -mapper.get(Object.assign(new MyDomainObject, {id: 'foo', createdAt: new Date(946684800000)})) - .then(myItem => { - // the item was found - }) - .catch(err => { - // the item was not found - }) -``` - -**NB:** The promise returned by the mapper will be rejected with an -`ItemNotFoundException` if the item sought is not found. - -#### `update` - -Updates an item in the table - -```typescript -const myItem = await mapper.get(Object.assign( - new MyDomainObject, - {id: 'foo', createdAt: new Date(946684800000)} -)); -myItem.completed = true; - -await mapper.update(myItem); -``` - -#### `delete` - -Removes an item from the table - -```typescript -await mapper.delete(Object.assign( - new MyDomainObject, - {id: 'foo', createdAt: new Date(946684800000)} -)); -``` - -#### `scan` - -Lists the items in a table or index - -```typescript -for await (const item of mapper.scan(MyDomainObject)) { - // individual items will be yielded as the scan is performed -} - -// Optionally, scan an index instead of the table: -for await (const item of mapper.scan(MyDomainObject, {indexName: 'myIndex'})) { - // individual items will be yielded as the scan is performed -} -``` - -#### `query` - -Finds a specific item (or range of items) in a table or index +const dynamoDb = new DynamoDB({region: 'us-west-2'}); +const keys = [ + ['tableName', {keyProperty: {N: '0'}}], + ['tableName', {keyProperty: {N: '1'}}], + ['tableName', {keyProperty: {N: '2'}}], + // etc., continuing to count up to + ['tableName', {keyProperty: {N: '1001'}}], +]; -```typescript -for await (const foo of mapper.query(MyDomainObject, {id: 'foo'})) { - // individual items with a hash key of "foo" will be yielded as the query is performed +for await (const item of new BatchGet(dynamoDb, keys)) { + console.log(item); } ``` -#### Batch operations - -The mapper also supports batch operations. Under the hood, the batch will -automatically be split into chunks that fall within DynamoDB's limits (25 for -`batchPut` and `batchDelete`, 100 for `batchGet`). The items can belong to any -number of tables, and exponential backoff for unprocessed items is handled -automatically. +The above code snippet will automatically split the provided keys into +`BatchGetItem` requests of 100 or fewer keys, and any unprocessed keys will be +automatically retried until they are handled. The above code will execute at +least 11 `BatchGetItem` operations, depending on how many items are returned +without processing due to insufficient provisioned read capacity. -##### `batchPut` +Each item yielded in the `for...await...of` loop will be a single DynamoDB +record.
Iteration will stop once each key has been retrieved or an error has +been encountered. -Creates (or overwrites) multiple items in the table +### Writing batches of items -```typescript -const toSave = [ - Object.assign(new MyDomainObject, {id: 'foo', completed: false}), - Object.assign(new MyDomainObject, {id: 'bar', completed: false}) -]; -for await (const persisted of mapper.batchPut(toSave)) { - // items will be yielded as they are successfully written -} -``` - -##### `batchGet` +Create a `BatchWrite` object, supplying an instantiated DynamoDB client from the +AWS SDK for JavaScript and an iterable of write requests that you wish to +execute. The iterable may be synchronous (such as an array) or asynchronous +(such as an object stream wrapped with [async-iter-stream](https://github.com/calvinmetcalf/async-iter-stream)'s +`wrap` method). -Fetches multiple items from the table +Each write request should contain either a `DeleteRequest` key or a `PutRequest` +key as described [in the Amazon DynamoDB API reference](http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_WriteRequest.html#DDB-Type-WriteRequest-DeleteRequest). ```typescript -const toGet = [ - Object.assign(new MyDomainObject, {id: 'foo', createdAt: new Date(946684800000)}), - Object.assign(new MyDomainObject, {id: 'bar', createdAt: new Date(946684800001)}) -]; -for await (const found of mapper.batchGet(toGet)) { - // items will be yielded as they are successfully retrieved -} -``` - -**NB:** Only items that exist in the table will be retrieved. If a key is not -found, it will be omitted from the result. - -##### `batchDelete` - -Removes multiple items from the table +import { BatchWrite } from '@aws/dynamodb-batch-iterator'; +import DynamoDB = require('aws-sdk/clients/dynamodb'); -```typescript -const toRemove = [ - Object.assign(new MyDomainObject, {id: 'foo', createdAt: new Date(946684800000)}), - Object.assign(new MyDomainObject, {id: 'bar', createdAt: new Date(946684800001)}) +const dynamoDb = new DynamoDB({region: 'us-west-2'}); +const keys = [ + ['tableName', {DeleteRequest: {Key: {keyProperty: {N: '0'}}}}], + ['tableName', {PutRequest: {Item: {keyProperty: {N: '1'}, otherProperty: {BOOL: false}}}}], + ['tableName', {DeleteRequest: {Key: {keyProperty: {N: '2'}}}}], + ['tableName', {PutRequest: {Item: {keyProperty: {N: '3'}, otherProperty: {BOOL: false}}}}], + // etc., continuing to count up to + ['tableName', {DeleteRequest: {Key: {keyProperty: {N: '102'}}}}], ]; -for await (const found of mapper.batchDelete(toRemove)) { - // items will be yielded as they are successfully removed -} -``` - -#### Operations with Expressions - -##### Aplication example - -```js -import { - AttributePath, - FunctionExpression, - UpdateExpression, -} from '@aws/dynamodb-expressions'; - -const expr = new UpdateExpression(); - -// given the anotation bellow -@table('tableName') -class MyRecord { - @hashKey() - email?: string; - - @attribute() - passwordHash?: string; - - @attribute() - passwordSalt?: string; - - @attribute() - verified?: boolean; - @attribute() - verifyToken?: string; +for await (const item of new BatchWrite(dynamoDb, keys)) { + console.log(item); } - -// you make a mapper operation as follows -const aRecord = Object.assign(new MyRecord(), { - email, - passwordHash: password, - passwordSalt: salt, - verified: false, - verifyToken: token, -}); -mapper.put(aRecord, { - condition: new FunctionExpression('attribute_not_exists', new AttributePath('email') -}).then( /* result
handler */ ); -``` - -#### Table lifecycle operations - -##### `createTable` - -Creates a table for the mapped class and waits for it to be initialized: - -```typescript -mapper.createTable(MyDomainObject, {readCapacityUnits: 5, writeCapacityUnits: 5}) - .then(() => { - // the table has been provisioned and is ready for use! - }) -``` - -##### `ensureTableExists` - -Like `createTable`, but only creates the table if it doesn't already exist: - -```typescript -mapper.ensureTableExists(MyDomainObject, {readCapacityUnits: 5, writeCapacityUnits: 5}) - .then(() => { - // the table has been provisioned and is ready for use! - }) -``` - -##### `deleteTable` - -Deletes the table for the mapped class and waits for it to be removed: - -```typescript -await mapper.deleteTable(MyDomainObject) -``` - -##### `ensureTableNotExists` - -Like `deleteTable`, but only deletes the table if it exists: - -```typescript -await mapper.ensureTableNotExists(MyDomainObject) -``` - -## Constituent packages - -The DataMapper is developed as a monorepo using [`lerna`](https://github.com/lerna/lerna). -More detailed documentation about the mapper's constituent packages is available -by viewing those packages directly. +The above code snippet will automatically split the provided keys into +`BatchWriteItem` requests of 25 or fewer write request objects, and any +unprocessed request objects will be automatically retried until they are +handled. The above code will execute at least 5 `BatchWriteItem` operations, +depending on how many items are returned without processing due to +insufficient provisioned write capacity. -* [Amazon DynamoDB Automarshaller](packages/dynamodb-auto-marshaller/) -* [Amazon DynamoDB Batch Iterator](packages/dynamodb-batch-iterator/) -* [Amazon DynamoDB DataMapper](packages/dynamodb-data-mapper/) -* [Amazon DynamoDB DataMapper Annotations](packages/dynamodb-data-mapper-annotations/) -* [Amazon DynamoDB Data Marshaller](packages/dynamodb-data-marshaller/) -* [Amazon DynamoDB Expressions](packages/dynamodb-expressions/) -* [Amazon DynamoDB Query Iterator](packages/dynamodb-query-iterator/) +Each item yielded in the `for...await...of` loop will be a single write request +that has succeeded. Iteration will stop once each request has been handled or an +error has been encountered.
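The literal arrays in the README examples above are only one way to feed the iterators; `BatchGet` and `BatchWrite` accept any synchronous or asynchronous iterable of `[tableName, element]` pairs. The following is a minimal sketch of that idea: the `tableName` table and `keyProperty` attribute are carried over from the examples above, while `idsToDelete`, `deleteRequests`, and `deleteAll` are hypothetical names introduced only for illustration.

```typescript
import { BatchWrite } from '@aws/dynamodb-batch-iterator';
import DynamoDB = require('aws-sdk/clients/dynamodb');

const dynamoDb = new DynamoDB({ region: 'us-west-2' });

// Hypothetical application data: ids of the records that should be deleted.
const idsToDelete = [0, 1, 2, 102];

// Lazily yield [tableName, writeRequest] pairs instead of materializing an array.
function* deleteRequests(): IterableIterator<[string, DynamoDB.WriteRequest]> {
    for (const id of idsToDelete) {
        yield [
            'tableName',
            { DeleteRequest: { Key: { keyProperty: { N: String(id) } } } },
        ];
    }
}

async function deleteAll(): Promise<void> {
    for await (const item of new BatchWrite(dynamoDb, deleteRequests())) {
        // Each yielded item is a write request that has been handled successfully.
        console.log(item);
    }
}

deleteAll().catch(console.error);
```

Batch splitting and retries of unprocessed requests work the same way here as in the array-based examples above.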
diff --git a/lerna.json b/lerna.json deleted file mode 100644 index 4eb56197..00000000 --- a/lerna.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "lerna": "2.11.0", - "version": "0.7.3", - "hoist": true -} diff --git a/package.json b/package.json index fda23a81..2f0b7d2b 100644 --- a/package.json +++ b/package.json @@ -1,11 +1,26 @@ { - "name": "dynamodb-data-mapper-js", - "private": true, + "name": "@aws/dynamodb-batch-iterator", + "version": "0.7.1", + "description": "Abstraction for DynamoDB batch reads and writes for that handles batch splitting and partial retries with exponential backoff", + "keywords": [ + "aws", + "dynamodb" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/awslabs/dynamodb-data-mapper-js.git" + }, + "bugs": { + "url": "https://github.com/awslabs/dynamodb-data-mapper-js/issues" + }, + "homepage": "https://awslabs.github.io/dynamodb-data-mapper-js/packages/dynamodb-batch-iterator/", + "main": "./build/index.js", + "types": "./build/index.d.ts", "scripts": { - "bootstrap": "lerna bootstrap", - "docs": "typedoc src && lerna run docs", - "pretest": "lerna run pretest", - "test": "jest" + "docs": "typedoc src", + "prepublishOnly": "tsc", + "pretest": "tsc -p tsconfig.test.json", + "test": "jest \"build/(.+).spec.js\"" }, "author": { "name": "AWS SDK for JavaScript Team", @@ -13,21 +28,26 @@ }, "license": "Apache-2.0", "devDependencies": { - "@types/jest": "^24", - "@types/node": "^8.0.4", - "jest": "^24", - "lerna": "^3.13", - "typedoc": "^0.14.0", - "typescript": "^3.4" + "@types/jest": "^27.0.3", + "@types/node": "^16.11.11", + "jest": "^27.4.3", + "prettier": "^2.5.0", + "typedoc": "^0.22.10", + "typescript": "^4.5.2" }, - "dependencies": { + "peerDependencies": { "aws-sdk": "^2.7.0" }, - "workspaces": [ - "packages/*" - ], + "dependencies": { + "aws-sdk": "^2.1042.0", + "tslib": "^2.3.1", + "utf8-bytes": "^0.0.1" + }, "jest": { "testEnvironment": "node", - "testPathIgnorePatterns": ["/node_modules/", ".ts"] + "testPathIgnorePatterns": [ + "/node_modules/", + ".ts" + ] } } diff --git a/packages/dynamodb-auto-marshaller/LICENSE b/packages/dynamodb-auto-marshaller/LICENSE deleted file mode 100644 index da05f5c9..00000000 --- a/packages/dynamodb-auto-marshaller/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2017 Amazon.com, Inc. or its affiliates - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/packages/dynamodb-auto-marshaller/README.md b/packages/dynamodb-auto-marshaller/README.md deleted file mode 100644 index 48b7f685..00000000 --- a/packages/dynamodb-auto-marshaller/README.md +++ /dev/null @@ -1,152 +0,0 @@ -# Amazon DynamoDB Automarshaller - -[![Apache 2 License](https://img.shields.io/github/license/awslabs/dynamodb-data-mapper-js.svg?style=flat)](http://aws.amazon.com/apache-2-0/) - -This library provides a `Marshaller` class that converts native JavaScript -values to DynamoDB AttributeValues and back again. It's designed to work with -ES6 features like sets, maps, and iterables, and can be configured to support -data types only supported by JavaScript (such as empty binary buffers) or by -Amazon DynamoDB (such as numbers of arbitrary size) with minimal tradeoffs. 
- -## Getting started - -To use the `Marshaller` to convert a JavaScript object to the data type expected -by Amazon DynamoDB, simply create an instance of the marshaller and call -`marshallItem`: - -```typescript -import {BinarySet, Marshaller} from '@aws/dynamodb-auto-marshaller'; - -const marshaller = new Marshaller(); -const original = { - string: 'a string', - number: 1234, - list: [ - 'a', - 'list', - 'of', - 'values', - ], - buffer: Buffer.from([0xde, 0xad, 0xbe, 0xef]), - setOfBuffers: new BinarySet([ - Uint8Array.from([0xde, 0xad]), - Uint8Array.from([0xbe, 0xef]), - Uint8Array.from([0xfa, 0xce]), - ] as Iterable), - stringSet: new Set([ - 'foo', - 'bar', - 'baz', - ]), - any: { - level: { - of: { - nesting: { - is: { - supported: true - } - } - } - } - }, -} - -// create a variable ready to be used with DynamoDB's low-level API -const marshalled = marshaller.marshallItem(original); - -// the output of `.marshallItem` can be converted back to a JavaScript type with -// `.unmarshallItem` -const unmarshalled = marshaller.unmarshallItem(original); - -// With a few caveats (listed below), the unmarshalled value should have the -// same structure and data as the original value. -deepEqual(original, unmarshalled); // true -``` - -Values may be converted to and from AttributeValue objects with `.marshallValue` -and `.unmarshallValue` directly: - -```typescript -import {Marshaller} from '@aws/dynamodb-auto-marshaller'; - -const marshaller = new Marshaller(); -const marshalled = marshaller.marshallValue('string'); // returns {S: 'string'} -const unmarshalled = marshaller.unmarshallValue(marshalled); // returns 'string' -``` - -## Caveats - -There are a few categories of values that cannot be seamlessly converted between -JavaScript and DynamoDB, such as big numbers, empty values, and mixed-type sets. - -### Big number support - -By default, the marshaller will unmarshall numeric values returned by DynamoDB -into instances of `NumberValue` rather than into native JavaScript numbers. -Numbers in DynamoDB may have up to 38 digits of precision, which exceeds the -precision available in a JavaScript `number`. `NumberValue` instances therefore -store the value returned by DynamoDB as a string and will only coerce the value -into a JavaScript number when the instance appears in an arithmetic expression, -when it is passed to `JSON.stringify`, or when its `valueOf` method is called. - -Similarly, numeric sets are returned as `NumberValueSet` instances that are -compatible with both numbers and `NumberValue`s. - -To disable this behavior, pass a configuration options argument to the -`Marshaller` constructor with `unwrapNumbers` set to `true`: - -```typescript -import {Marshaller} from '@aws/dynamodb-auto-marshaller'; - -const marshaller = new Marshaller({unwrapNumbers: true}); -``` - -### Empty value support - -DynamoDB's data model requires that strings, sets, and binary attributes have -lengths greater than zero, whereas JavaScript has no such requirement. By -default, the marshaller will not attempt to alter empty values, as the -marshaller would not be able to disambiguate any sigil value used from a field -that was meant to legitimately contain that value. - -The marshaller offers two opt-in options for handling empty values, both of -which are controlled using the `onEmpty` configuration option. - -Settting `onEmpty` to `'nullify'` will direct the marshaller to convert empty -values to null attribute values (`{NULL: true}`) and persist them to DynamoDB. 
-This allows consumers of the item to know that an empty value was saved, though -it will be slightly altered. When fetched from DynamoDB, the value will be -unmarshalled as `null`: - -```typescript -import {Marshaller} from '@aws/dynamodb-auto-marshaller'; - -const marshaller = new Marshaller({onEmpty: 'nullify'}); -const marshalled = marshaller.marshallValue(''); // returns {NULL: true} -const unmarshalled = marshaller.unmarshallValue(marshalled); // returns null -``` - -Setting `onEmpty` to `'omit'` will direct the marshaller to remove empty values -from the serialized item: - -```typescript -import {Marshaller} from '@aws/dynamodb-auto-marshaller'; - -const marshaller = new Marshaller({onEmpty: 'omit'}); -const marshalled = marshaller.marshallValue(''); // returns undefined -const unmarshalled = marshaller.unmarshallValue(marshalled); // returns undefined -``` - -### Symbols and functions - -By default, the marshaller will throw an error when it encounters a symbol or -function. You can direct the marshaller to instead omit such values from its -output by setting the `onInvalid` configuration option to `'omit'`: - -```typescript -import {Marshaller} from '@aws/dynamodb-auto-marshaller'; - -const marshaller = new Marshaller({onInvalid: 'omit'}); -const marshalled = marshaller.marshallValue(Symbol.iterator); // returns undefined -const unmarshalled = marshaller.unmarshallValue(marshalled); // returns undefined -``` diff --git a/packages/dynamodb-auto-marshaller/package.json b/packages/dynamodb-auto-marshaller/package.json deleted file mode 100644 index 62bad353..00000000 --- a/packages/dynamodb-auto-marshaller/package.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "@aws/dynamodb-auto-marshaller", - "version": "0.7.1", - "description": "A data marshaller that converts JavaScript types into Amazon DynamoDB AttributeValues", - "keywords": [ - "aws", - "dynamodb" - ], - "repository": { - "type": "git", - "url": "git+https://github.com/awslabs/dynamodb-data-mapper-js.git" - }, - "bugs": { - "url": "https://github.com/awslabs/dynamodb-data-mapper-js/issues" - }, - "homepage": "https://awslabs.github.io/dynamodb-data-mapper-js/packages/dynamodb-auto-marshaller/", - "main": "./build/index.js", - "types": "./build/index.d.ts", - "scripts": { - "docs": "typedoc src", - "prepublishOnly": "tsc", - "pretest": "tsc -p tsconfig.test.json", - "test": "jest \"build/(.+).spec.js\"" - }, - "author": { - "name": "AWS SDK for JavaScript Team", - "email": "aws-sdk-js@amazon.com" - }, - "license": "Apache-2.0", - "devDependencies": { - "@types/jest": "^24", - "@types/node": "^8.0.4", - "aws-sdk": "^2.7.0", - "jest": "^24", - "typedoc": "^0.14.0", - "typescript": "^3.4" - }, - "peerDependencies": { - "aws-sdk": "^2.7.0" - }, - "dependencies": { - "tslib": "^1.9" - } -} diff --git a/packages/dynamodb-auto-marshaller/src/BinarySet.spec.ts b/packages/dynamodb-auto-marshaller/src/BinarySet.spec.ts deleted file mode 100644 index 045ad2bc..00000000 --- a/packages/dynamodb-auto-marshaller/src/BinarySet.spec.ts +++ /dev/null @@ -1,183 +0,0 @@ -import {BinarySet} from "./BinarySet"; - -describe('BinarySet', () => { - it('should create a set with values provided to the constructor', () => { - const set = new BinarySet([ - new Uint8Array([0xde, 0xad]), - new Uint8Array([0xbe, 0xef]), - ]); - - expect(set.size).toBe(2); - - expect(set.has(new Uint8Array([0xde, 0xad]))).toBe(true); - expect(set.has(new Uint8Array([0xbe, 0xef]))).toBe(true); - expect(set.has(new Uint8Array([0xfa, 
0xce]))).toBe(false); - }); - - describe('#add', () => { - it('should add new values to the set', () => { - const set = new BinarySet([ - new Uint8Array([0xde, 0xad]), - new Uint8Array([0xbe, 0xef]), - new Uint8Array(0), - ]); - expect(set.has(new Uint8Array([0xfa, 0xce]))).toBe(false); - - set.add(new Uint8Array([0xfa, 0xce])); - expect(set.has(new Uint8Array([0xfa, 0xce]))).toBe(true); - }); - - it('should be a no-op if the value is already in the set', () => { - const set = new BinarySet([new Uint8Array(1)]); - expect(set.size).toBe(1); - set.add(new ArrayBuffer(1)); - expect(set.size).toBe(1); - }); - }); - - describe('#clear', () => { - it('should drop all values', () => { - const set = new BinarySet([ - new Uint8Array([0xde, 0xad]), - new Uint8Array([0xbe, 0xef]), - ]); - set.clear(); - expect(set.size).toBe(0); - }); - }); - - describe('#delete', () => { - it( - 'should return `true` and remove the provided value if it was found in the set', - () => { - const set = new BinarySet([ - new Uint8Array([0xde, 0xad]), - new Uint8Array([0xbe, 0xef]), - ]); - expect(set.delete(new Uint8Array([0xde, 0xad]))).toBe(true); - expect(set.size).toBe(1); - expect(set.has(new Uint8Array([0xde, 0xad]))).toBe(false); - } - ); - - it( - 'should remove values with the same underlying binary value even if the object is a different view type', - () => { - const set = new BinarySet([ - new Uint8Array([0xde, 0xad]), - new Uint8Array([0xbe, 0xef]), - ]); - expect(set.delete( - new Int16Array(new Uint8Array([0xde, 0xad]).buffer) - )).toBe(true); - expect(set.size).toBe(1); - } - ); - - it( - 'should return false and be a no-op if the value is not in the set', - () => { - const set = new BinarySet([ - new Uint8Array([0xde, 0xad]), - new Uint8Array([0xbe, 0xef]), - ]); - expect(set.delete(new Uint8Array([0xfa, 0xce]))).toBe(false); - expect(set.size).toBe(2); - } - ); - }); - - describe('#entries', () => { - it( - 'should provide a [key, value] iterable where the key and value are the same (in line with ES6 Set behavior)', - () => { - const set = new BinarySet([ - new Uint8Array([0xde, 0xad]), - new Uint8Array([0xbe, 0xef]), - ]); - for (let [key, value] of set.entries()) { - expect(key).toBe(value); - expect(set.has(value)).toBe(true); - } - } - ); - }); - - describe('#forEach', () => { - it('should invoke a callback for each value in the set', () => { - const set = new BinarySet([ - new Uint8Array([0xde, 0xad]), - new Uint8Array([0xbe, 0xef]), - ]); - const otherSet = new BinarySet(); - set.forEach(otherSet.add, otherSet); - - expect(otherSet.size).toBe(set.size); - }); - }); - - describe('#keys', () => { - it( - 'should iterate over all values in the set (in line with ES6 Set behavior)', - () => { - const set = new BinarySet([ - new Uint8Array([0xde, 0xad]), - new Uint8Array([0xbe, 0xef]), - ]); - - let iterations = 0; - for (let key of set.keys()) { - expect(set.has(key)).toBe(true); - iterations++; - } - - expect(iterations).toBe(set.size); - } - ); - }); - - describe('#values', () => { - it('should iterate over all values in the set', () => { - const set = new BinarySet([ - new Uint8Array([0xde, 0xad]), - new Uint8Array([0xbe, 0xef]), - ]); - - let iterations = 0; - for (let key of set.values()) { - expect(set.has(key)).toBe(true); - iterations++; - } - - expect(iterations).toBe(set.size); - }); - }); - - describe('#[Symbol.iterator]', () => { - it('should iterate over all values in the set', () => { - const set = new BinarySet([ - new Uint8Array([0xde, 0xad]), - new Uint8Array([0xbe, 0xef]), - ]); - - 
let iterations = 0; - for (let key of set) { - expect(set.has(key)).toBe(true); - iterations++; - } - - expect(iterations).toBe(set.size); - }); - }); - - describe('#[Symbol.toStringTag]', () => { - it('should return a static value of "Set"', () => { - expect(new BinarySet()[Symbol.toStringTag]).toBe('Set'); - }); - - it('should cause toString to return a Set-identifying string', () => { - expect(Object.prototype.toString.call(new BinarySet())) - .toBe('[object Set]'); - }); - }); -}); diff --git a/packages/dynamodb-auto-marshaller/src/BinarySet.ts b/packages/dynamodb-auto-marshaller/src/BinarySet.ts deleted file mode 100644 index 8cd905a6..00000000 --- a/packages/dynamodb-auto-marshaller/src/BinarySet.ts +++ /dev/null @@ -1,68 +0,0 @@ -import {ObjectSet} from "./ObjectSet"; - -export type BinaryValue = ArrayBuffer|ArrayBufferView; - -/** - * A set of binary values represented as either ArrayBuffer objects or - * ArrayBufferView objects. Equality is determined by the underlying byte - * sequence and not by the identity or view window type of the provided value. - */ -export class BinarySet extends ObjectSet { - delete(value: BinaryValue): boolean { - const valueView = getBinaryView(value); - const scrubbedValues = this._values.filter(item => { - return !binaryEquals(getBinaryView(item), valueView); - }); - - const numRemoved = this._values.length - scrubbedValues.length; - this._values = scrubbedValues; - - return numRemoved > 0; - } - - /** - * @inheritDoc - * - * Equality is determined by inspecting the bytes of the ArrayBuffer or - * ArrayBufferView. - * - * @example On a little-endian system, the following values would be - * considered equal: - * - * new Uint32Array([0xdeadbeef]); - * (new Uint32Array([0xdeadbeef])).buffer; - * new Uint16Array([0xbeef, 0xdead]); - * new Uint8Array([0xef, 0xbe, 0xad, 0xde]); - */ - has(value: BinaryValue): boolean { - const valueView = getBinaryView(value); - - for (let item of this) { - if (binaryEquals(getBinaryView(item), valueView)) { - return true; - } - } - - return false; - } -} - -function binaryEquals(a: DataView, b: DataView): boolean { - if (a.byteLength !== b.byteLength) { - return false; - } - - for (let i = 0; i < a.byteLength; i++) { - if (a.getUint8(i) !== b.getUint8(i)) { - return false; - } - } - - return true; -} - -function getBinaryView(value: BinaryValue): DataView { - return ArrayBuffer.isView(value) - ? 
new DataView(value.buffer, value.byteOffset, value.byteLength) - : new DataView(value); -} diff --git a/packages/dynamodb-auto-marshaller/src/Marshaller.spec.ts b/packages/dynamodb-auto-marshaller/src/Marshaller.spec.ts deleted file mode 100644 index 46190a8a..00000000 --- a/packages/dynamodb-auto-marshaller/src/Marshaller.spec.ts +++ /dev/null @@ -1,771 +0,0 @@ -import {Marshaller} from "./Marshaller"; -import {BinarySet} from "./BinarySet"; -import {NumberValue} from "./NumberValue"; -import {NumberValueSet} from "./NumberValueSet"; - -describe('Marshaller', () => { - describe('#marshallItem', () => { - it('should convert objects to the DynamoDB item format', () => { - const marshaller = new Marshaller(); - const marshalled = marshaller.marshallItem({ - string: 'foo', - list: ['fizz', 'buzz', 'pop'], - map: { - nestedMap: { - key: 'value', - } - }, - number: 123, - nullValue: null, - boolValue: true, - stringSet: new Set(['foo', 'bar', 'baz']) - }); - - expect(marshalled).toEqual({ - string: {S: 'foo'}, - list: {L: [{S: 'fizz'}, {S: 'buzz'}, {S: 'pop'}]}, - map: { - M: { - nestedMap: { - M: { - key: {S: 'value'} - } - } - } - }, - number: {N: '123'}, - nullValue: {NULL: true}, - boolValue: {BOOL: true}, - stringSet: {SS: ['foo', 'bar', 'baz']} - }); - }); - - it( - 'should return an empty attribute map when provided invalid input and the onInvalid option is set to "omit"', - () => { - const marshaller = new Marshaller({onInvalid: 'omit'}); - expect(marshaller.marshallItem('string' as any)).toEqual({}); - } - ); - - it('should throw when provided invalid input and the onInvalid option is set to "throw"', () => { - const marshaller = new Marshaller({onInvalid: 'throw'}); - expect(() => marshaller.marshallItem('string' as any)).toThrow(); - }); - }); - - describe('#marshallValue', () => { - describe('strings', () => { - it('should convert strings to StringAttributeValues', () => { - expect((new Marshaller()).marshallValue('string')) - .toEqual({S: 'string'}); - }); - - it( - 'should convert empty strings to null when onEmpty option set to "nullify"', - () => { - expect( - (new Marshaller({onEmpty: 'nullify'})).marshallValue('') - ).toEqual({NULL: true}); - } - ); - - it( - 'should remove empty strings when onEmpty option set to "omit"', - () => { - expect( - (new Marshaller({onEmpty: 'omit'})).marshallValue('') - ).toBeUndefined(); - } - ); - - it( - 'should convert empty strings to StringAttributeValues otherwise', - () => { - expect((new Marshaller()).marshallValue('')) - .toEqual({S: ''}); - } - ); - }); - - describe('binary values', () => { - it('should convert binary values to BinaryAttributeValues', () => { - const bin = Uint8Array.from([0xde, 0xad, 0xbe, 0xef]); - expect((new Marshaller()).marshallValue(bin)) - .toEqual({B: bin}); - }); - - it( - 'should convert empty binary values to null when onEmpty option set to "nullify"', - () => { - expect( - (new Marshaller({onEmpty: 'nullify'})) - .marshallValue(new Uint8Array(0)) - ).toEqual({NULL: true}); - } - ); - - it( - 'should omit empty binary values when onEmpty option set to "omit"', - () => { - expect( - (new Marshaller({onEmpty: 'omit'})) - .marshallValue(new Uint8Array(0)) - ).toBeUndefined(); - } - ); - - it( - 'should convert empty binary values to null when onEmpty option set to "nullify"', - () => { - expect( - (new Marshaller()).marshallValue(new Uint8Array(0)) - ).toEqual({B: new Uint8Array(0)}); - } - ); - }); - - describe('numbers', () => { - it('should convert numbers to NumberAttributeValues', () => { - expect((new 
Marshaller()).marshallValue(42)) - .toEqual({N: '42'}); - }); - - it('should convert NumberValues to NumberAttributeValues', () => { - expect( - (new Marshaller()).marshallValue(new NumberValue('123')) - ).toEqual({N: '123'}); - }); - }); - - describe('null', () => { - it('should convert nulls to NullAttributeValues', () => { - expect((new Marshaller()).marshallValue(null)) - .toEqual({NULL: true}); - }); - }); - - describe('boolean', () => { - it('should convert booleans to BooleanAttributeValues', () => { - const marshaller = new Marshaller(); - expect(marshaller.marshallValue(true)).toEqual({BOOL: true}); - expect(marshaller.marshallValue(false)).toEqual({BOOL: false}); - }); - }); - - describe('lists', () => { - it('should convert arrays to ListAttributeValues', () => { - expect((new Marshaller()).marshallValue([])).toEqual({L: []}); - }); - - it('should convert list members to AttributeValues', function() { - expect( - (new Marshaller()).marshallValue(['a', 1, true, null, {}]) - ).toEqual({L: [ - {S: 'a'}, - {N: '1'}, - {BOOL: true}, - {NULL: true}, - {M: {}}, - ]}); - }); - - it('should convert iterables to ListAttributeValues', () => { - const inputGen = function *() { - yield 'a'; - yield 1; - yield true; - yield null; - yield {}; - }; - - expect( - (new Marshaller()).marshallValue(inputGen()) - ).toEqual({L: [ - {S: 'a'}, - {N: '1'}, - {BOOL: true}, - {NULL: true}, - {M: {}}, - ]}); - }); - - it('should omit undefined values from the serialized list', () => { - expect( - (new Marshaller()) - .marshallValue([ - 'a', - undefined, - 1, - undefined, - true, - undefined, - null, - undefined, - {} - ]) - ).toEqual({L: [ - {S: 'a'}, - {N: '1'}, - {BOOL: true}, - {NULL: true}, - {M: {}}, - ]}); - }); - }); - - describe('maps', () => { - it('should convert objects to MapAttributeValues', () => { - expect((new Marshaller()).marshallValue({})).toEqual({M: {}}); - }); - - it('should convert maps to MapAttributeValues', () => { - expect((new Marshaller()).marshallValue(new Map())) - .toEqual({M: {}}); - }); - - it( - 'should omit keys whose values are serialized as undefined', - () => { - const marshaller = new Marshaller(); - expect(marshaller.marshallValue({a: void 0})) - .toEqual({M: {}}); - - expect(marshaller.marshallValue(new Map([['a', void 0]]))) - .toEqual({M: {}}); - } - ); - - it( - 'should convert objects with inheritance chains to MapAttributeValues', - () => { - class MyPrototype { - public readonly foo: string = 'bar'; - } - - class MyDescendant extends MyPrototype { - public readonly fizz: string = 'buzz'; - } - - const myInstance = new MyDescendant(); - (myInstance as any).quux = true; - - expect((new Marshaller()).marshallValue(myInstance)) - .toEqual({ - M: { - foo: {S: 'bar'}, - fizz: {S: 'buzz'}, - quux: {BOOL: true} - } - }); - } - ); - - it('should convert map members to AttributeValues', () => { - const map = new Map(); - map.set('a', 'a'); - map.set('b', 1); - map.set('c', true); - map.set('d', null); - map.set('e', ['s']); - - expect((new Marshaller()).marshallValue(map)).toEqual({ - M: { - a: {S: 'a'}, - b: {N: '1'}, - c: {BOOL: true}, - d: {NULL: true}, - e: {L: [{S: 's'}]} - } - }); - }); - - it( - 'should omit map members whose keys are not strings when the onInvalid option is "omit"', - () => { - const marshaller = new Marshaller({ - onInvalid: 'omit' - }); - const map = new Map(); - map.set('a', 'a'); - map.set(1, 1); - map.set({}, true); - map.set([], null); - map.set(null, ['s']); - - expect(marshaller.marshallValue(map)) - .toEqual({M: {a: {S: 'a'}}}); - } - 
); - - it('should throw otherwise', () => { - const marshaller = new Marshaller(); - const map = new Map(); - map.set('a', 'a'); - map.set(1, 1); - map.set({}, true); - map.set([], null); - map.set(null, ['s']); - - expect(() => marshaller.marshallValue(map)).toThrow(); - }); - }); - - describe('sets', () => { - it( - 'should omit empty sets when the onEmpty option is "omit"', - () => { - const marshaller = new Marshaller({onEmpty: 'omit'}); - expect(marshaller.marshallValue(new Set())) - .toBeUndefined(); - } - ); - - it( - 'should convert empty sets to null when the onEmpty option is "nullify"', - () => { - const marshaller = new Marshaller({onEmpty: 'nullify'}); - expect(marshaller.marshallValue(new Set())) - .toEqual({NULL: true}); - } - ); - it( - 'should omit empty sets when the onEmpty option is "leave", as the kind of set cannot be inferred', - () => { - const marshaller = new Marshaller({onEmpty: 'leave'}); - expect(marshaller.marshallValue(new Set())) - .toBeUndefined(); - } - ); - - it( - 'should omit sets with members of an unknown type when the onEmpty option is "omit"', - () => { - const marshaller = new Marshaller({ - onInvalid: 'omit' - }); - const set = new Set(); - set.add({}); - expect(marshaller.marshallValue(set)) - .toBeUndefined(); - } - ); - - it( - 'should throw on sets with members of an unknown type otherwise', - () => { - const marshaller = new Marshaller(); - const set = new Set(); - set.add({}); - expect(() => marshaller.marshallValue(set)).toThrow(); - } - ); - - it( - 'should drop invalid members when onInvalid option is set to "omit"', - () => { - const marshaller = new Marshaller({ - onInvalid: 'omit' - }); - expect(marshaller.marshallValue(new Set(['a', 1, 'c']))) - .toEqual({SS: ['a', 'c']}); - } - ); - - it('should throw on invalid members otherwise', () => { - const marshaller = new Marshaller(); - expect( - () => marshaller.marshallValue(new Set(['a', 1, 'c'])) - ).toThrow(); - }); - - it( - 'should return a NullAttributeValue for an emptied set when onEmpty is set to "nullify"', - () => { - const marshaller = new Marshaller({onEmpty: 'nullify'}); - expect(marshaller.marshallValue(new Set(['']))) - .toEqual({NULL: true}); - } - ); - - it( - 'should return undefined for an emptied set when onEmpty is set to "omit"', - () => { - const marshaller = new Marshaller({onEmpty: 'omit'}); - expect(marshaller.marshallValue(new Set(['']))) - .toBeUndefined(); - } - ); - - it('should serialize empty values otherwise', () => { - const marshaller = new Marshaller(); - expect(marshaller.marshallValue(new Set(['']))) - .toEqual({SS: ['']}); - }); - - describe('string sets', () => { - it( - 'should convert sets with strings into StringSetAttributeValues', - () => { - expect( - (new Marshaller()) - .marshallValue(new Set(['a', 'b', 'c'])) - ).toEqual({SS: ['a', 'b', 'c']}); - } - ); - - it( - 'should drop empty members when onEmpty option is set to "nullify"', - () => { - expect( - (new Marshaller({onEmpty: 'nullify'})) - .marshallValue(new Set(['a', '', 'c'])) - ).toEqual({SS: ['a', 'c']}); - } - ); - - it( - 'should drop empty members when onEmpty option is set to "omit"', - () => { - expect( - (new Marshaller({onEmpty: 'omit'})) - .marshallValue(new Set(['a', '', 'c'])) - ).toEqual({SS: ['a', 'c']}); - } - ); - - it('should keep empty members otherwise', () => { - expect( - (new Marshaller()) - .marshallValue(new Set(['a', '', 'c'])) - ).toEqual({SS: ['a', '', 'c']}); - }); - }); - - describe('number sets', () => { - it( - 'should convert sets with numbers into 
NumberSetAttributeValues', - () => { - expect( - (new Marshaller()) - .marshallValue(new Set([1, 2, 3])) - ).toEqual({NS: ['1', '2', '3']}); - } - ); - - it( - 'should convert NumberValueSet objects into NumberSetAttributeValues', - () => { - expect( - (new Marshaller()) - .marshallValue(new NumberValueSet([ - new NumberValue('1'), - new NumberValue('2'), - new NumberValue('3'), - ])) - ).toEqual({NS: ['1', '2', '3']}); - } - ); - }); - - describe('binary sets', () => { - it( - 'should convert sets with binary values into BinarySetAttributeValues', - () => { - const marshaller = new Marshaller(); - const converted = marshaller.marshallValue(new BinarySet([ - Uint8Array.from([0xde, 0xad]), - Uint8Array.from([0xbe, 0xef]).buffer, - Uint8Array.from([0xfa, 0xce]), - ])); - expect(converted).toEqual({BS: [ - Uint8Array.from([0xde, 0xad]), - Uint8Array.from([0xbe, 0xef]).buffer, - Uint8Array.from([0xfa, 0xce]), - ]}); - } - ); - - it( - 'should drop empty members when the onEmpty option is set to "nullify"', - () => { - const marshaller = new Marshaller({onEmpty: 'nullify'}); - const converted = marshaller.marshallValue(new BinarySet([ - Uint8Array.from([0xde, 0xad]), - Uint8Array.from([0xbe, 0xef]).buffer, - Uint8Array.from([0xfa, 0xce]), - new Uint8Array(0), - ])); - expect(converted).toEqual({BS: [ - Uint8Array.from([0xde, 0xad]), - Uint8Array.from([0xbe, 0xef]).buffer, - Uint8Array.from([0xfa, 0xce]), - ]}); - } - ); - - it( - 'should drop empty members when the onEmpty option is set to "omit"', - () => { - const marshaller = new Marshaller({onEmpty: 'omit'}); - const converted = marshaller.marshallValue(new BinarySet([ - Uint8Array.from([0xde, 0xad]), - Uint8Array.from([0xbe, 0xef]).buffer, - Uint8Array.from([0xfa, 0xce]), - new Uint8Array(0), - ])); - expect(converted).toEqual({BS: [ - Uint8Array.from([0xde, 0xad]), - Uint8Array.from([0xbe, 0xef]).buffer, - Uint8Array.from([0xfa, 0xce]), - ]}); - } - ); - - it('should keep empty members otherwise', () => { - const marshaller = new Marshaller(); - const converted = marshaller.marshallValue(new BinarySet([ - Uint8Array.from([0xde, 0xad]), - Uint8Array.from([0xbe, 0xef]).buffer, - Uint8Array.from([0xfa, 0xce]), - new Uint8Array(0), - ])); - expect(converted).toEqual({BS: [ - Uint8Array.from([0xde, 0xad]), - Uint8Array.from([0xbe, 0xef]).buffer, - Uint8Array.from([0xfa, 0xce]), - new Uint8Array(0), - ]}); - }); - }); - }); - - describe('undefined values', () => { - it('should return undefined for undefined', () => { - expect((new Marshaller().marshallValue(void 0))) - .toBeUndefined(); - }); - }); - - describe('symbols', () => { - it( - 'should omit symbols when the onInvalid option is set to "omit"', - () => { - expect( - (new Marshaller({onInvalid: 'omit'}) - .marshallValue(Symbol.iterator)) - ).toBeUndefined(); - } - ); - - it('should throw on symbols otherwise', () => { - expect( - () => (new Marshaller().marshallValue(Symbol.iterator)) - ).toThrow(); - }); - }); - - describe('functions', () => { - it( - 'should omit functions when the onInvalid option is set to "omit"', - () => { - expect( - (new Marshaller({onInvalid: 'omit'}) - .marshallValue(() => {})) - ).toBeUndefined(); - } - ); - - it('should throw on symbols otherwise', () => { - expect( - () => (new Marshaller().marshallValue(() => {})) - ).toThrow(); - }); - }); - }); - - describe('#unmarshallItem', () => { - it('should convert DynamoDB items to plain vanilla JS objects', function() { - var unmarshalled = (new Marshaller({unwrapNumbers: true})).unmarshallItem({ - string: {S: 
'foo'}, - list: {L: [{S: 'fizz'}, {S: 'buzz'}, {S: 'pop'}]}, - map: { - M: { - nestedMap: { - M: { - key: {S: 'value'} - } - } - } - }, - number: {N: '123'}, - nullValue: {NULL: true}, - boolValue: {BOOL: true} - }); - - expect(unmarshalled).toEqual({ - string: 'foo', - list: ['fizz', 'buzz', 'pop'], - map: { - nestedMap: { - key: 'value', - } - }, - number: 123, - nullValue: null, - boolValue: true - }); - }); - }); - - describe('#unmarshallValue', () => { - const marshaller = new Marshaller(); - describe('strings', () => { - it('should convert StringAttributeValues to strings', () => { - expect(marshaller.unmarshallValue({S: 'string'})) - .toEqual('string'); - }); - }); - - describe('binary values', () => { - it('should convert BinaryAttributeValues to binary values', () => { - expect(marshaller.unmarshallValue({B: new Uint8Array(1)})) - .toEqual(new Uint8Array(1)); - }); - }); - - describe('numbers', () => { - it( - 'should convert NumberAttributeValues to NumberValues', - () => { - const unsafeInteger = '9007199254740991000'; - const converted = marshaller.unmarshallValue({N: unsafeInteger}) as NumberValue; - expect(converted.toString()).toBe(unsafeInteger); - } - ); - - it( - 'should convert NumberAttributeValues to numbers when unwrapNumbers is true', - () => { - const marshaller = new Marshaller({unwrapNumbers: true}); - expect(marshaller.unmarshallValue({N: '42'})).toEqual(42); - } - ); - }); - - describe('null', () => { - it('should convert NullAttributeValues to null', () => { - expect(marshaller.unmarshallValue({NULL: true})).toEqual(null); - }); - }); - - describe('boolean', () => { - it('should convert BooleanAttributeValues to booleans', () => { - expect(marshaller.unmarshallValue({BOOL: true})).toEqual(true); - expect(marshaller.unmarshallValue({BOOL: false})) - .toEqual(false); - }); - }); - - describe('lists', () => { - it('should convert ListAttributeValues to lists', () => { - expect(marshaller.unmarshallValue({L: []})).toEqual([]); - }); - - it('should convert member AttributeValues to list members', () => { - expect(marshaller.unmarshallValue({L: [ - {S: 'a'}, - {N: '1'}, - {BOOL: true}, - {NULL: true}, - {M: {}} - ]})).toEqual(['a', new NumberValue('1'), true, null, {}]); - }); - }); - - describe('maps', () => { - it('should convert MapAttributeValues to objects', () => { - expect(marshaller.unmarshallValue({M: {}})).toEqual({}); - }); - - it('should convert member AttributeValues to map members', () => { - expect(marshaller.unmarshallValue({ - M: { - a: {S: 'a'}, - b: {N: '1'}, - c: {BOOL: true}, - d: {NULL: true}, - e: {L: [{S: 's'}]} - } - })).toEqual({ - a: 'a', - b: new NumberValue('1'), - c: true, - d: null, - e: ['s'], - }); - }); - }); - - describe('string sets', () => { - it( - 'should convert StringSetAttributeValues into sets with strings', - () => { - expect(marshaller.unmarshallValue({SS: ['a', 'b', 'c']})) - .toEqual(new Set(['a', 'b', 'c'])); - } - ); - }); - - describe('number sets', () => { - it( - 'should convert NumberSetAttributeValues into sets with NumberValues', - function() { - const unsafeInteger = '900719925474099100'; - const converted = marshaller.unmarshallValue({NS: [ - unsafeInteger + '1', - unsafeInteger + '2', - unsafeInteger + '3', - ]}); - - expect(converted).toEqual(new NumberValueSet([ - new NumberValue(unsafeInteger + '1'), - new NumberValue(unsafeInteger + '2'), - new NumberValue(unsafeInteger + '3'), - ])); - } - ); - - it( - 'should convert NumberSetAttributeValues into sets with numbers when unwrapNumbers is true', - () => { - 
const marshaller = new Marshaller({unwrapNumbers: true}); - expect(marshaller.unmarshallValue({NS: ['1', '2', '3']})) - .toEqual(new Set([1, 2, 3])); - } - ); - }); - - describe('binary sets', () => { - it( - 'should convert BinarySetAttributeValues into sets with binary strings', - () => { - expect( - marshaller.unmarshallValue({BS: [ - new Uint8Array(1), - new Uint8Array(2), - ]}) - ).toEqual(new BinarySet([ - new Uint8Array(1), - new Uint8Array(2), - ])); - } - ); - }); - - it('should convert objects with no values to empty maps', () => { - expect(marshaller.unmarshallValue({foo: 'bar'} as any)) - .toEqual({}); - }); - }); -}); diff --git a/packages/dynamodb-auto-marshaller/src/Marshaller.ts b/packages/dynamodb-auto-marshaller/src/Marshaller.ts deleted file mode 100644 index 2108238b..00000000 --- a/packages/dynamodb-auto-marshaller/src/Marshaller.ts +++ /dev/null @@ -1,451 +0,0 @@ -import {AttributeMap, AttributeValue} from "aws-sdk/clients/dynamodb"; -import {BinarySet, BinaryValue} from "./BinarySet"; -import {isArrayBuffer} from "./isArrayBuffer"; -import {NumberValue} from "./NumberValue"; -import {NumberValueSet} from "./NumberValueSet"; - -export const EmptyHandlingStrategies = { - omit: 'omit', - nullify: 'nullify', - leave: 'leave', -}; - -/** - * The behavior the marshaller should exhibit when it encounters "empty" - * data that would be rejected as invalid by DynamoDB, such as 0-length - * buffers or the string `''`. - * - * Possible values: - * * `omit` - Remove the empty value from the marshalled output (i.e., marshall - * this value to `undefined` rather than to an {AttributeValue}). - * - * * `nullify` - Convert the value from its detected to data type to `null`. - * This allows marshalled data to preserve a sigil of emptiness in a way - * compatible with DynamoDB. - * - * This option will also cause empty strings and buffers to be dropped from - * string and binary sets, respectively. - * - * * `leave` - Do not alter the value. - */ -export type EmptyHandlingStrategy = keyof typeof EmptyHandlingStrategies; - -export const InvalidHandlingStrategies = { - /** - * Remove any invalid values from the serialized output. - */ - omit: 'omit', - - /** - * Throw an error when an unserializable value is encountered. - */ - throw: 'throw', -}; - -/** - * The behavior the marshaller should exhibit when it encounters data that - * cannot be marshalled to a DynamoDB AttributeValue, such as a Symbol or - * Function object. - * - * Possible values: - * * `omit` - Remove any invalid values from the serialized output. - * - * * `throw` - Throw an error when an unserializable value is encountered. - */ -export type InvalidHandlingStrategy = keyof typeof InvalidHandlingStrategies; - -export type UnmarshalledAttributeValue = - string | - number | - NumberValue | - BinaryValue | - Set | - Set | - NumberValueSet | - BinarySet | - null | - boolean | - UnmarshalledListAttributeValue | - UnmarshalledMapAttributeValue; - -export interface UnmarshalledListAttributeValue extends Array {} - -export interface UnmarshalledMapAttributeValue { - [key: string]: UnmarshalledAttributeValue; -} - -export interface MarshallingOptions { - /** - * The behavior the marshaller should exhibit when it encounters "empty" - * data that would be rejected as invalid by DynamoDB, such as 0-length - * buffers or the string `''`. 
- */ - onEmpty?: EmptyHandlingStrategy; - - /** - * The behavior the marshaller should exhibit when it encounters data that - * cannot be marshalled to a DynamoDB AttributeValue, such as a Symbol or - * Function object. - */ - onInvalid?: InvalidHandlingStrategy; - - /** - * Whether numbers should be unmarshalled to a special object type that can - * preserve values that would lose precision if converted to JavaScript's - * native number type. - */ - unwrapNumbers?: boolean; -} - -/** - * A class that will convert arbitrary JavaScript data types to their most - * logical in the DynamoDB schema. - */ -export class Marshaller { - private readonly onEmpty: EmptyHandlingStrategy; - private readonly onInvalid: InvalidHandlingStrategy; - private readonly unwrapNumbers: boolean; - - constructor({ - onEmpty = 'leave', - onInvalid = 'throw', - unwrapNumbers = false - }: MarshallingOptions = {}) { - this.onEmpty = onEmpty; - this.onInvalid = onInvalid; - this.unwrapNumbers = unwrapNumbers; - } - - /** - * Convert a JavaScript object with string keys and arbitrary values into an - * object with string keys and DynamoDB AttributeValue objects as values. - */ - public marshallItem(item: {[key: string]: any}): AttributeMap { - const value = this.marshallValue(item); - if (!(value && value.M) && this.onInvalid === 'throw') { - throw new Error( - `Cannot serialize ${typeof item} as an attribute map` - ); - } - - return value && value.M ? value.M : {}; - } - - /** - * Convert a JavaScript value into a DynamoDB AttributeValue or `undefined`. - * - * @throws Error if the value cannot be converted to a DynamoDB type and the - * marshaller has been configured to throw on invalid input. - */ - public marshallValue(value: any): AttributeValue|undefined { - switch (typeof value) { - case 'boolean': - return {BOOL: value}; - case 'number': - return {N: value.toString(10)}; - case 'object': - return this.marshallComplexType(value); - case 'string': - return value ? {S: value} : this.handleEmptyString(value); - case 'undefined': - return undefined; - case 'function': - case 'symbol': - default: - if (this.onInvalid === 'throw') { - throw new Error( - `Cannot serialize values of the ${typeof value} type` - ); - } - } - } - - /** - * Convert a DynamoDB operation result (an object with string keys and - * AttributeValue values) to an object with string keys and native - * JavaScript values. - */ - public unmarshallItem(item: AttributeMap): UnmarshalledMapAttributeValue { - return this.unmarshallValue({M: item}) as UnmarshalledMapAttributeValue; - } - - /** - * Convert a DynamoDB AttributeValue into a native JavaScript value. - */ - public unmarshallValue(item: AttributeValue): UnmarshalledAttributeValue { - if (item.S !== undefined) { - return item.S; - } - - if (item.N !== undefined) { - return this.unwrapNumbers - ? 
Number(item.N) - : new NumberValue(item.N); - } - - if (item.B !== undefined) { - return item.B as BinaryValue; - } - - if (item.BOOL !== undefined) { - return item.BOOL; - } - - if (item.NULL !== undefined) { - return null; - } - - if (item.SS !== undefined) { - const set = new Set(); - for (let member of item.SS) { - set.add(member); - } - return set; - } - - if (item.NS !== undefined) { - if (this.unwrapNumbers) { - const set = new Set(); - for (let member of item.NS) { - set.add(Number(member)); - } - return set; - } - - return new NumberValueSet( - item.NS.map(numberString => new NumberValue(numberString)) - ); - } - - if (item.BS !== undefined) { - return new BinarySet(item.BS as Array); - } - - if (item.L !== undefined) { - return item.L.map(this.unmarshallValue.bind(this)); - } - - const {M = {}} = item; - return Object.keys(M).reduce( - (map: UnmarshalledMapAttributeValue, key: string) => { - map[key] = this.unmarshallValue(M[key]); - return map; - }, - {} - ); - } - - private marshallComplexType( - value: Set | - Map | - Iterable | - {[key: string]: any} | - null | - NumberValue | - BinaryValue - ): AttributeValue|undefined { - if (value === null) { - return {NULL: true}; - } - - if (NumberValue.isNumberValue(value)) { - return {N: value.toString()}; - } - - if (isBinaryValue(value)) { - return this.marshallBinaryValue(value); - } - - if (isSet(value)) { - return this.marshallSet(value); - } - - if (isMap(value)) { - return this.marshallMap(value); - } - - if (isIterable(value)) { - return this.marshallList(value); - } - - return this.marshallObject(value); - } - - private marshallBinaryValue(binary: BinaryValue): AttributeValue|undefined { - if (binary.byteLength > 0 || this.onEmpty === 'leave') { - return {B: binary}; - } - - if (this.onEmpty === 'nullify') { - return {NULL: true}; - } - } - - private marshallList(list: Iterable): AttributeValue { - const values: Array = []; - for (let value of list) { - const marshalled = this.marshallValue(value); - if (marshalled) { - values.push(marshalled); - } - } - - return {L: values}; - } - - private marshallMap(map: Map): AttributeValue { - const members: {[key: string]: AttributeValue} = {}; - for (let [key, value] of map) { - if (typeof key !== 'string') { - if (this.onInvalid === 'omit') { - continue; - } - - throw new Error( - `MapAttributeValues must have strings as keys; ${typeof key} received instead` - ); - } - - const marshalled = this.marshallValue(value); - if (marshalled) { - members[key] = marshalled; - } - } - - return {M: members}; - } - - private marshallObject(object: {[key: string]: any}): AttributeValue { - return { - M: Object.keys(object).reduce( - (map: AttributeMap, key: string): AttributeMap => { - const marshalled = this.marshallValue(object[key]); - if (marshalled) { - map[key] = marshalled; - } - return map; - }, - {} - ), - }; - } - - private marshallSet(arg: Set): AttributeValue|undefined { - switch (getSetType(arg[Symbol.iterator]().next().value)) { - case 'binary': - return this.collectSet(arg, isBinaryEmpty, 'BS', 'binary'); - case 'number': - return this.collectSet(arg, isNumberEmpty, 'NS', 'number', stringifyNumber); - case 'string': - return this.collectSet(arg, isStringEmpty, 'SS', 'string'); - case 'unknown': - if (this.onInvalid === 'throw') { - throw new Error('Sets must be composed of strings,' + - ' binary values, or numbers'); - } - return undefined; - case 'undefined': - if (this.onEmpty === 'nullify') { - return {NULL: true}; - } - } - } - - private collectSet( - set: Set, - isEmpty: 
(element: T) => boolean, - tag: 'BS'|'NS'|'SS', - elementType: 'binary'|'number'|'string', - transform?: (arg: T) => R - ): AttributeValue|undefined { - const values: Array = []; - for (let element of set) { - if (getSetType(element) !== elementType) { - if (this.onInvalid === 'omit') { - continue; - } - - throw new Error( - `Unable to serialize ${typeof element} as a member of a ${elementType} set` - ); - } - - if ( - !isEmpty(element) || - this.onEmpty === 'leave' - ) { - values.push(transform ? transform(element) : element); - } - } - - if (values.length > 0 || this.onEmpty === 'leave') { - return {[tag]: values}; - } - - if (this.onEmpty === 'nullify') { - return {NULL: true}; - } - } - - private handleEmptyString(value: string): AttributeValue|undefined { - switch (this.onEmpty) { - case 'leave': - return {S: value}; - case 'nullify': - return {NULL: true}; - } - } -} - -type SetType = 'string'|'number'|'binary'; - -function getSetType(arg: any): SetType|'undefined'|'unknown' { - const type = typeof arg; - if (type === 'string' || type === 'number' || type === 'undefined') { - return type; - } - - if (NumberValue.isNumberValue(arg)) { - return 'number'; - } - - if (ArrayBuffer.isView(arg) || isArrayBuffer(arg)) { - return 'binary'; - } - - return 'unknown'; -} - -function isBinaryEmpty(arg: BinaryValue): boolean { - return arg.byteLength === 0; -} - -function isBinaryValue(arg: any): arg is BinaryValue { - return ArrayBuffer.isView(arg) || isArrayBuffer(arg); -} - -function isIterable(arg: any): arg is Iterable { - return Boolean(arg) && typeof arg[Symbol.iterator] === 'function'; -} - -function isMap(arg: any): arg is Map { - return Boolean(arg) - && Object.prototype.toString.call(arg) === '[object Map]'; -} - -function isNumberEmpty(): boolean { - return false; -} - -function isSet(arg: any): arg is Set { - return Boolean(arg) - && Object.prototype.toString.call(arg) === '[object Set]'; -} - -function isStringEmpty(arg: string): boolean { - return arg.length === 0; -} - -function stringifyNumber(arg: number|NumberValue): string { - return arg.toString(); -} diff --git a/packages/dynamodb-auto-marshaller/src/NumberValue.spec.ts b/packages/dynamodb-auto-marshaller/src/NumberValue.spec.ts deleted file mode 100644 index dcffb29b..00000000 --- a/packages/dynamodb-auto-marshaller/src/NumberValue.spec.ts +++ /dev/null @@ -1,75 +0,0 @@ -import {NumberValue} from "./NumberValue"; - -describe('NumberValue', function() { - it('should store numbers', function() { - const number = new NumberValue(123); - expect(number.toString()).toBe('123'); - }); - - it('should store numeric strings', function() { - const number = new NumberValue('123.1'); - expect(number.toString()).toBe('123.1'); - }); - - it( - 'should store numeric values that would lose precision if converted to JavaScript numbers', - function() { - const number = new NumberValue('900719925474099100'); - if (typeof (Number as any).isSafeInteger === 'function') { - expect((Number as any).isSafeInteger(number.valueOf())) - .toBe(false); - } - expect(number.toString()).toBe('900719925474099100'); - } - ); - - it('should convert numeric strings to numbers', function() { - const number = new NumberValue('123.1'); - expect(number.valueOf()).toBe(123.1); - }); - - it('should allow easy conversion of the value into a number', () => { - const safeNum = new NumberValue('123'); - expect(+safeNum).toBe(123); - expect((safeNum as any) + 1).toBe(124); - }); - - it('should appear as a numeric value when converted to JSON', function() { - 
expect(JSON.stringify({ - number: new NumberValue('123'), - nested: { - number: new NumberValue('234') - } - })).toBe('{"number":123,"nested":{"number":234}}'); - }); - - it( - 'should reply to Object.prototype.toString with [object DynamoDbNumberValue]', - () => { - const number = new NumberValue('900719925474099100'); - expect(Object.prototype.toString.call(number)) - .toBe('[object DynamoDbNumberValue]'); - } - ); - - describe('::isNumberValue', () => { - it('should return `true` for NumberValue objects', () => { - expect(NumberValue.isNumberValue(new NumberValue('0'))).toBe(true); - }); - - it('should return `false` for other values', () => { - for (const invalid of [ - 'string', - 123, - null, - void 0, - true, - [], - {}, - new Uint8Array(10)] - ) { - expect(NumberValue.isNumberValue(invalid)).toBe(false); - } - }); - }); -}); diff --git a/packages/dynamodb-auto-marshaller/src/NumberValue.ts b/packages/dynamodb-auto-marshaller/src/NumberValue.ts deleted file mode 100644 index 770e1575..00000000 --- a/packages/dynamodb-auto-marshaller/src/NumberValue.ts +++ /dev/null @@ -1,49 +0,0 @@ -const NUMBER_VALUE_TAG = 'DynamoDbNumberValue'; -const EXPECTED_TAG = `[object ${NUMBER_VALUE_TAG}]`; - -/** - * A number that may contain greater precision than can safely be stored in - * JavaScript's `number` data type. Numerical values are represented internally - * as strings (the format used by DynamoDB's JSON-based data representation - * schema). - */ -export class NumberValue { - public readonly value: string; - public readonly [Symbol.toStringTag] = NUMBER_VALUE_TAG; - - constructor(value: string|number) { - this.value = value.toString().trim(); - } - - /** - * Convert the value to its desired JSON representation. Called by - * `JSON.stringify`. - */ - toJSON(): number { - return this.valueOf(); - } - - /** - * Convert the value to its desired string representation. Called - * automatically when objects are coerced into strings. - */ - toString(): string { - return this.value; - } - - /** - * Convert the value to its desired literal representation. Called - * automatically when objects appear in arithmetic expressions. - */ - valueOf(): number { - return Number(this.value); - } - - /** - * Evaluate whether the provided value is a NumberValue object. 
- */ - static isNumberValue(arg: any): arg is NumberValue { - return (typeof NumberValue === 'function' && arg instanceof NumberValue) - || Object.prototype.toString.call(arg) === EXPECTED_TAG; - } -} diff --git a/packages/dynamodb-auto-marshaller/src/NumberValueSet.spec.ts b/packages/dynamodb-auto-marshaller/src/NumberValueSet.spec.ts deleted file mode 100644 index b13f628c..00000000 --- a/packages/dynamodb-auto-marshaller/src/NumberValueSet.spec.ts +++ /dev/null @@ -1,184 +0,0 @@ -import {NumberValue} from "./NumberValue"; -import {NumberValueSet} from "./NumberValueSet"; - -describe('NumberValueSet', () => { - it('should create a set with values provided to the constructor', () => { - const set = new NumberValueSet([ - new NumberValue('1'), - new NumberValue('2'), - ]); - - expect(set.size).toBe(2); - - expect(set.has(new NumberValue('1'))).toBe(true); - expect(set.has(new NumberValue('2'))).toBe(true); - expect(set.has(new NumberValue('3'))).toBe(false); - }); - - describe('#add', () => { - it('should add new values to the set', () => { - const set = new NumberValueSet([ - new NumberValue('1'), - new NumberValue('2'), - ]); - expect(set.has(new NumberValue('3'))).toBe(false); - - set.add(new NumberValue('3')); - expect(set.has(new NumberValue('3'))).toBe(true); - }); - - it('should be a no-op if the value is already in the set', () => { - const set = new NumberValueSet([new NumberValue('3')]); - expect(set.size).toBe(1); - set.add(new NumberValue('3')); - expect(set.size).toBe(1); - }); - - it('should allow adding number primitives', () => { - const set = new NumberValueSet([new NumberValue('3')]); - expect(set.size).toBe(1); - - set.add(3); - expect(set.size).toBe(1); - expect(set.has(3)).toBe(true); - expect(set.has(new NumberValue('3'))).toBe(true); - - set.add(4); - expect(set.size).toBe(2); - expect(set.has(4)).toBe(true); - expect(set.has(new NumberValue('4'))).toBe(true); - }); - }); - - describe('#clear', () => { - it('should drop all values', () => { - const set = new NumberValueSet([ - new NumberValue('1'), - new NumberValue('2'), - ]); - set.clear(); - expect(set.size).toBe(0); - }); - }); - - describe('#delete', () => { - it( - 'should return `true` and remove the provided value if it was found in the set', - () => { - const set = new NumberValueSet([ - new NumberValue('1'), - new NumberValue('2'), - ]); - expect(set.delete(new NumberValue('1'))).toBe(true); - expect(set.size).toBe(1); - expect(set.has(new NumberValue('1'))).toBe(false); - } - ); - - it( - 'should return false and be a no-op if the value is not in the set', - () => { - const set = new NumberValueSet([ - new NumberValue('1'), - new NumberValue('2'), - ]); - expect(set.delete(new NumberValue('3'))).toBe(false); - expect(set.size).toBe(2); - } - ); - }); - - describe('#entries', () => { - it( - 'should provide a [key, value] iterable where the key and value are the same (in line with ES6 Set behavior)', - () => { - const set = new NumberValueSet([ - new NumberValue('1'), - new NumberValue('2'), - ]); - for (let [key, value] of set.entries()) { - expect(key).toBe(value); - expect(set.has(value)).toBe(true); - } - } - ); - }); - - describe('#forEach', () => { - it('should invoke a callback for each value in the set', () => { - const set = new NumberValueSet([ - new NumberValue('1'), - new NumberValue('2'), - ]); - const otherSet = new NumberValueSet(); - set.forEach(otherSet.add, otherSet); - - expect(otherSet.size).toBe(set.size); - }); - }); - - describe('#keys', () => { - it( - 'should iterate over all values 
in the set (in line with ES6 Set behavior)', - () => { - const set = new NumberValueSet([ - new NumberValue('1'), - new NumberValue('2'), - ]); - - let iterations = 0; - for (let key of set.keys()) { - expect(set.has(key)).toBe(true); - iterations++; - } - - expect(iterations).toBe(set.size); - } - ); - }); - - describe('#values', () => { - it('should iterate over all values in the set', () => { - const set = new NumberValueSet([ - new NumberValue('1'), - new NumberValue('2'), - ]); - - let iterations = 0; - for (let key of set.values()) { - expect(set.has(key)).toBe(true); - iterations++; - } - - expect(iterations).toBe(set.size); - }); - }); - - describe('#[Symbol.iterator]', () => { - it('should iterate over all values in the set', () => { - const set = new NumberValueSet([ - new NumberValue('1'), - new NumberValue('2'), - ]); - - let iterations = 0; - for (let key of set) { - expect(set.has(key)).toBe(true); - iterations++; - } - - expect(iterations).toBe(set.size); - }); - }); - - describe('#[Symbol.toStringTag]', () => { - it('should return a static value of "Set"', () => { - expect(new NumberValueSet()[Symbol.toStringTag]).toBe('Set'); - }); - - it('should cause toString to return a Set-identifying string', () => { - expect(Object.prototype.toString.call(new NumberValueSet())) - .toBe('[object Set]'); - }); - }); -}); diff --git a/packages/dynamodb-auto-marshaller/src/NumberValueSet.ts b/packages/dynamodb-auto-marshaller/src/NumberValueSet.ts deleted file mode 100644 index 9cbe8e7f..00000000 --- a/packages/dynamodb-auto-marshaller/src/NumberValueSet.ts +++ /dev/null @@ -1,46 +0,0 @@ -import {ObjectSet} from "./ObjectSet"; -import {NumberValue} from "./NumberValue"; - -/** - * A set of numeric values represented internally as NumberValue objects. - * Equality is determined by the string representation of the number and not by - * the identity or data type of the provided value. - */ -export class NumberValueSet extends ObjectSet { - /** - * @inheritDoc - * - * If a number or string is provided, it will be converted to a NumberValue - * object. - */ - add(value: NumberValue|number|string) { - if (typeof value === 'number' || typeof value === 'string') { - value = new NumberValue(value); - } - - super.add(value); - return this; - } - - delete(value: NumberValue|number|string): boolean { - const valueString = value.toString(); - const scrubbedValues = this._values - .filter(item => item.toString() !== valueString); - - const numRemoved = this._values.length - scrubbedValues.length; - this._values = scrubbedValues; - - return numRemoved > 0; - } - - has(value: NumberValue|number|string): boolean { - const valueString = value.toString(); - for (let item of this) { - if (item.toString() === valueString) { - return true; - } - } - - return false; - } -} diff --git a/packages/dynamodb-auto-marshaller/src/ObjectSet.ts b/packages/dynamodb-auto-marshaller/src/ObjectSet.ts deleted file mode 100644 index e6bc1909..00000000 --- a/packages/dynamodb-auto-marshaller/src/ObjectSet.ts +++ /dev/null @@ -1,119 +0,0 @@ -export abstract class ObjectSet implements Set { - /** - * Returns the string literal 'Set' for use by Object.prototype.toString. - * This allows for identifying Sets without checking constructor identity. - */ - public readonly [Symbol.toStringTag]: 'Set' = 'Set'; - - protected _values: Array = []; - - /** - * Creates a new ObjectSet and optionally seeds it with values. - * - * @param iterable An optional iterable of values to add to the set. 
- */ - constructor(iterable?: Iterable) { - if (iterable) { - for (let item of iterable) { - this.add(item); - } - } - } - - /** - * Add a value to the set. If the value is already contained in the set, it - * will not be added a second time. - * - * @param value The value to add - */ - add(value: T): this { - if (!this.has(value)) { - this._values.push(value); - } - - return this; - } - - /** - * Remove all values from the set. - */ - clear(): void { - this._values = []; - } - - /** - * Removes a particular value from the set. If the value was contained in - * the set prior to this method being called, `true` will be returned; if - * the value was not in the set, `false` will be returned. In either case, - * the value provided will not be in the set after this method returns. - * - * @param value The value to remove from the set. - */ - abstract delete(value: T): boolean; - - /** - * Returns an iterable two-member tuples for each item in the set, where - * the item is provided twice. - * - * Part of the ES2015 Set specification for compatibility with Map objects. - */ - entries(): IterableIterator<[T, T]> { - return this._values.map<[T, T]>( - value => [value, value] - )[Symbol.iterator](); - } - - /** - * Invokes a callback once for each member of the set. - * - * @param callback The function to invoke with each set member - * @param thisArg The `this` context on which to invoke the callback - */ - forEach( - callback: ( - value: T, - value2: T, - set: Set - ) => void, - thisArg?: any - ): void { - this._values.forEach((value, index, array) => { - callback.call(thisArg, value, value, this); - }, thisArg); - } - - /** - * Determines if a provided value is already a member of the set. - * - * @param value The value against which set members should be checked - */ - abstract has(value: T): boolean; - - /** - * Returns an IterableIterator of each member of the set. - */ - keys(): IterableIterator { - return this[Symbol.iterator](); - } - - /** - * Returns the number of members in the set. - */ - get size(): number { - return this._values.length; - } - - /** - * Returns an IterableIterator of each member of the set. - */ - values(): IterableIterator { - return this[Symbol.iterator](); - } - - /** - * Returns an IterableIterator of each member of the set. 
- */ - [Symbol.iterator](): IterableIterator { - return this._values[Symbol.iterator](); - } -} diff --git a/packages/dynamodb-auto-marshaller/src/index.ts b/packages/dynamodb-auto-marshaller/src/index.ts deleted file mode 100644 index 62a6f2f4..00000000 --- a/packages/dynamodb-auto-marshaller/src/index.ts +++ /dev/null @@ -1,4 +0,0 @@ -export * from './BinarySet'; -export * from './Marshaller'; -export * from './NumberValue'; -export * from './NumberValueSet'; diff --git a/packages/dynamodb-auto-marshaller/src/isArrayBuffer.spec.ts b/packages/dynamodb-auto-marshaller/src/isArrayBuffer.spec.ts deleted file mode 100644 index 3d3a92fc..00000000 --- a/packages/dynamodb-auto-marshaller/src/isArrayBuffer.spec.ts +++ /dev/null @@ -1,37 +0,0 @@ -import {isArrayBuffer} from "./isArrayBuffer"; - -describe('isArrayBuffer', () => { - const arrayBufferConstructor = ArrayBuffer; - - afterEach(() => { - (ArrayBuffer as any) = arrayBufferConstructor; - }); - - it('should return true for ArrayBuffer objects', () => { - expect(isArrayBuffer(new ArrayBuffer(0))).toBe(true); - }); - - it('should return false for ArrayBufferView objects', () => { - const view = new Uint8Array(0); - - expect(isArrayBuffer(view)).toBe(false); - expect(isArrayBuffer(view.buffer)).toBe(true); - }); - - it('should return false for scalar values', () => { - for (let scalar of ['string', 123.234, true, null, void 0]) { - expect(isArrayBuffer(scalar)).toBe(false); - } - }); - - it( - 'should return true for ArrayBuffers created with a different instance of the ArrayBuffer constructor', - () => { - const buffer = new ArrayBuffer(0); - (ArrayBuffer as any) = () => buffer; - - expect(buffer).not.toBeInstanceOf(ArrayBuffer); - expect(isArrayBuffer(buffer)).toBe(true); - } - ); -}); diff --git a/packages/dynamodb-auto-marshaller/src/isArrayBuffer.ts b/packages/dynamodb-auto-marshaller/src/isArrayBuffer.ts deleted file mode 100644 index 30e1ca5e..00000000 --- a/packages/dynamodb-auto-marshaller/src/isArrayBuffer.ts +++ /dev/null @@ -1,8 +0,0 @@ -/** - * Determines if the provided argument is an ArrayBuffer object. Compatible with - * ArrayBuffers created in separate iframes and VMs. 
- */ -export function isArrayBuffer(arg: any): arg is ArrayBuffer { - return (typeof ArrayBuffer === 'function' && arg instanceof ArrayBuffer) || - Object.prototype.toString.call(arg) === '[object ArrayBuffer]'; -} diff --git a/packages/dynamodb-auto-marshaller/tsconfig.json b/packages/dynamodb-auto-marshaller/tsconfig.json deleted file mode 100644 index 303c06b1..00000000 --- a/packages/dynamodb-auto-marshaller/tsconfig.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "compilerOptions": { - "target": "es5", - "lib": [ - "es5", - "es2015.iterable", - "es2015.promise", - "es2015.collection", - "es2015.symbol.wellknown" - ], - "downlevelIteration": true, - "importHelpers": true, - "module": "commonjs", - "strict": true, - "declaration": true, - "rootDir": "./src", - "outDir": "./build" - }, - "typedocOptions": { - "mode": "file", - "out": "../../docs/packages/dynamodb-auto-marshaller", - "excludeNotExported": true, - "excludePrivate": true, - "hideGenerator": true - } -} diff --git a/packages/dynamodb-auto-marshaller/tsconfig.test.json b/packages/dynamodb-auto-marshaller/tsconfig.test.json deleted file mode 100644 index 4e803a14..00000000 --- a/packages/dynamodb-auto-marshaller/tsconfig.test.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "inlineSourceMap": true, - "inlineSources": true, - "rootDir": "./src", - "outDir": "./build" - } -} diff --git a/packages/dynamodb-batch-iterator/.npmignore b/packages/dynamodb-batch-iterator/.npmignore deleted file mode 100644 index 1d116ecc..00000000 --- a/packages/dynamodb-batch-iterator/.npmignore +++ /dev/null @@ -1,10 +0,0 @@ -/src -/node_modules -/coverage - -*.spec.d.ts -*.spec.js -*.spec.js.map - -tsconfig.json -tsconfig.test.json diff --git a/packages/dynamodb-batch-iterator/CHANGELOG.md b/packages/dynamodb-batch-iterator/CHANGELOG.md deleted file mode 100644 index 0129214b..00000000 --- a/packages/dynamodb-batch-iterator/CHANGELOG.md +++ /dev/null @@ -1,20 +0,0 @@ -# Changelog -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) -and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). - -## [0.7.1] -Remove package rollup at `./build/index.mjs` due to bundler incompatibilities. - -## [0.7.0] -Add a package rollup at `./build/index.mjs` to support tree shaking. - -## [0.3.1] -### Fixed - - When the source for a batch operation is a synchronous iterable, exhaust the - source before interleaving throttled items. - - When a write is returned as unprocessed, do not yield the marshalled form. - -## [0.3.0] -Initial release diff --git a/packages/dynamodb-batch-iterator/LICENSE b/packages/dynamodb-batch-iterator/LICENSE deleted file mode 100644 index da05f5c9..00000000 --- a/packages/dynamodb-batch-iterator/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2017 Amazon.com, Inc. or its affiliates - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/packages/dynamodb-batch-iterator/README.md b/packages/dynamodb-batch-iterator/README.md deleted file mode 100644 index 2af8c550..00000000 --- a/packages/dynamodb-batch-iterator/README.md +++ /dev/null @@ -1,90 +0,0 @@ -# Amazon DynamoDB Batch Iteration - -[![Apache 2 License](https://img.shields.io/github/license/awslabs/dynamodb-data-mapper-js.svg?style=flat)](http://aws.amazon.com/apache-2-0/) - -This library provides utilities for automatically submitting arbitrarily-sized -batches of reads and writes to DynamoDB using well-formed `BatchGetItem` and -`BatchWriteItem` operations, respectively. Partial successes (i.e., -`BatchGetItem` operations that return some responses and some unprocessed keys -or `BatchWriteItem` operations that return some unprocessed items) will retry -the unprocessed items automatically using exponential backoff. - -## Getting started - -### Reading batches of items - -Create a `BatchGet` object, supplying an instantiated DynamoDB client from the -AWS SDK for JavaScript and an iterable of keys that you wish to retrieve. The -iterable may be synchronous (such as an array) or asynchronous (such as an -object stream wrapped with [async-iter-stream](https://github.com/calvinmetcalf/async-iter-stream)'s -`wrap` method). - -```typescript -import { BatchGet } from '@aws/dynamodb-batch-iterator'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -const dynamoDb = new DynamoDB({region: 'us-west-2'}); -const keys = [ - ['tableName', {keyProperty: {N: '0'}}], - ['tableName', {keyProperty: {N: '1'}}], - ['tableName', {keyProperty: {N: '2'}}], - // etc., continuing to count up to - ['tableName', {keyProperty: {N: '1001'}}], -]; - -for await (const item of new BatchGet(dynamoDb, keys)) { - console.log(item); -} -``` - -The above code snippet will automatically split the provided keys into -`BatchGetItem` requests of 100 or fewer keys, and any unprocessed keys will be -automatically retried until they are handled. The above code will execute at -least 11 `BatchGetItem` operations, depending on how many items are returned -without processing due to insufficient provisioned read capacity. - -Each item yielded in the `for...await...of` loop will be a single DynamoDB -record. Iteration will stop once each key has been retrieved or an error has -been encountered. - -### Writing batches of items - -Create a `BatchWrite` object, supplying an instantiated DynamoDB client from the -AWS SDK for JavaScript and an iterable of write requests that you wish to -execute. The iterable may be synchronous (such as an array) or asynchronous -(such as an object stream wrapped with [async-iter-stream](https://github.com/calvinmetcalf/async-iter-stream)'s -`wrap` method). - -Each write request should contain either a `DeleteRequest` key or a `PutRequest` -key as described [in the Amazon DynamoDB API reference](http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_WriteRequest.html#DDB-Type-WriteRequest-DeleteRequest). 
- -```typescript -import { BatchWrite } from '@aws/dynamodb-batch-iterator'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -const dynamoDb = new DynamoDB({region: 'us-west-2'}); -const keys = [ - ['tableName', {DeleteRequest: {Key: {keyProperty: {N: '0'}}}}], - ['tableName', {PutRequest: {Item: {keyProperty: {N: '1'}, otherProperty: {BOOL: false}}}}], - ['tableName', {DeleteRequest: {Key: {keyProperty: {N: '2'}}}}], - ['tableName', {PutRequest: {Item: {keyProperty: {N: '3'}, otherProperty: {BOOL: false}}}}], - // etc., continuing to count up to - ['tableName', {DeleteRequest: {Key: {keyProperty: {N: '102'}}}}], -]; - -for await (const item of new BatchWrite(dynamoDb, keys)) { - console.log(item); -} -``` - -The above code snippet will automatically split the provided write requests into -`BatchWriteItem` requests of 25 or fewer write request objects, and any -unprocessed request objects will be automatically retried until they are -handled. The above code will execute at least 5 `BatchWriteItem` operations, -depending on how many items are returned without processing due to -insufficient provisioned write capacity. - -Each item yielded in the `for...await...of` loop will be a single write request -that has succeeded. Iteration will stop once each request has been handled or an -error has been encountered. diff --git a/packages/dynamodb-batch-iterator/package.json b/packages/dynamodb-batch-iterator/package.json deleted file mode 100644 index 9f3ed62b..00000000 --- a/packages/dynamodb-batch-iterator/package.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "name": "@aws/dynamodb-batch-iterator", - "version": "0.7.1", - "description": "Abstraction for DynamoDB batch reads and writes that handles batch splitting and partial retries with exponential backoff", - "keywords": [ - "aws", - "dynamodb" - ], - "repository": { - "type": "git", - "url": "git+https://github.com/awslabs/dynamodb-data-mapper-js.git" - }, - "bugs": { - "url": "https://github.com/awslabs/dynamodb-data-mapper-js/issues" - }, - "homepage": "https://awslabs.github.io/dynamodb-data-mapper-js/packages/dynamodb-batch-iterator/", - "main": "./build/index.js", - "types": "./build/index.d.ts", - "scripts": { - "docs": "typedoc src", - "prepublishOnly": "tsc", - "pretest": "tsc -p tsconfig.test.json", - "test": "jest \"build/(.+).spec.js\"" - }, - "author": { - "name": "AWS SDK for JavaScript Team", - "email": "aws-sdk-js@amazon.com" - }, - "license": "Apache-2.0", - "devDependencies": { - "@types/jest": "^24", - "@types/node": "^8.0.4", - "aws-sdk": "^2.7.0", - "jest": "^24", - "typedoc": "^0.14.0", - "typescript": "^3.4" - }, - "peerDependencies": { - "aws-sdk": "^2.7.0" - }, - "dependencies": { - "tslib": "^1.9", - "utf8-bytes": "^0.0.1" - } -} diff --git a/packages/dynamodb-batch-iterator/src/BatchOperation.ts b/packages/dynamodb-batch-iterator/src/BatchOperation.ts deleted file mode 100644 index 7b9db9a8..00000000 --- a/packages/dynamodb-batch-iterator/src/BatchOperation.ts +++ /dev/null @@ -1,244 +0,0 @@ -import { - BatchState, - SyncOrAsyncIterable, - TableState, - TableStateElement, - ThrottledTableConfiguration, -} from './types'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -if (Symbol && !Symbol.asyncIterator) { - (Symbol as any).asyncIterator = Symbol.for("__@@asyncIterator__"); -} - -export abstract class BatchOperation< - Element extends TableStateElement -> implements AsyncIterableIterator<[string, Element]> { - /** - * The maximum number of elements 
that may be included in a single batch. - */ - protected abstract readonly batchSize: number; - - /** - * Items that have been retrieved and are ready to be returned. - */ - protected readonly pending: Array<[string, Element]> = []; - - /** - * A mapping of table names to table-specific operation state (e.g., the - * number of throttling events experienced, etc.) - */ - protected readonly state: BatchState<Element> = {}; - - /** - * Input elements that are prepared for immediate dispatch - */ - protected readonly toSend: Array<[string, Element]> = []; - - private readonly throttled = new Set<Promise<ThrottledTableConfiguration<Element>>>(); - private readonly iterator: Iterator<[string, Element]>|AsyncIterator<[string, Element]>; - private sourceDone: boolean = false; - private sourceNext: IteratorResult<[string, Element]>|Promise<IteratorResult<[string, Element]>>; - private lastResolved?: Promise<IteratorResult<[string, Element]>>; - - /** - * @param client The AWS SDK client with which to communicate with - * DynamoDB. - * @param items A synchronous or asynchronous iterable of tuples - * describing the operations to execute. The first member - * of the tuple should be the name of the table targeted by - * the operation. - */ - constructor( - protected readonly client: DynamoDB, - items: SyncOrAsyncIterable<[string, Element]> - ) { - if (isIterable(items)) { - this.iterator = items[Symbol.iterator](); - } else { - this.iterator = items[Symbol.asyncIterator](); - } - this.sourceNext = this.iterator.next(); - } - - next(): Promise<IteratorResult<[string, Element]>> { - if (this.lastResolved) { - this.lastResolved = this.lastResolved.then(() => this.getNext()); - } else { - this.lastResolved = this.getNext(); - } - - return this.lastResolved; - } - - [Symbol.asyncIterator]() { - return this; - } - - /** - * Execute a single batch request and process the result. - */ - protected abstract doBatchRequest(): Promise<void>; - - /** - * Create and return the initial state object for a given DynamoDB table. - * - * @param tableName The name of the table whose initial state should be - * returned. - */ - protected getInitialTableState(tableName: string): TableState<Element> { - return { - backoffFactor: 0, - name: tableName, - }; - } - - /** - * Accept an array of unprocessed items belonging to a single table and - * re-enqueue it for submission, making sure the appropriate level of - * backoff is applied to future operations on the same table. - * - * @param tableName The table to which the unprocessed elements belong. - * @param unprocessed Elements returned by DynamoDB as not yet processed. - * The elements should not be unmarshalled, but they - * should be reverted to the form used for elements - * that have not yet been sent. - */ - protected handleThrottled( - tableName: string, - unprocessed: Array<Element> - ): void { - const tableState = this.state[tableName]; - tableState.backoffFactor++; - - if (tableState.tableThrottling) { - this.throttled.delete(tableState.tableThrottling.backoffWaiter); - unprocessed.unshift(...tableState.tableThrottling.unprocessed); - } - - tableState.tableThrottling = { - unprocessed, - backoffWaiter: new Promise<ThrottledTableConfiguration<Element>>(resolve => { - setTimeout( - resolve, - exponentialBackoff(tableState.backoffFactor), - tableState - ); - }) - }; - - this.throttled.add(tableState.tableThrottling.backoffWaiter); - } - - /** - * Iterate over all pending writes and move those targeting throttled tables - * into the throttled queue. - * - * @param unprocessedTables A set of tables for which some items were - * returned without being processed. 
- */ - protected movePendingToThrottled(unprocessedTables: Set<string>) { - for (let i = this.toSend.length - 1; i > -1; i--) { - const [table, attributes] = this.toSend[i]; - if (unprocessedTables.has(table)) { - (this.state[table] as ThrottledTableConfiguration<Element>) - .tableThrottling.unprocessed.push(attributes); - this.toSend.splice(i, 1); - } - } - } - - private addToSendQueue([tableName, attributes]: [string, Element]): void { - if (!this.state[tableName]) { - this.state[tableName] = this.getInitialTableState(tableName); - } - const tableState = this.state[tableName]; - - if (tableState.tableThrottling) { - tableState.tableThrottling.unprocessed.push(attributes); - } else { - this.toSend.push([tableName, attributes]); - } - } - - private enqueueThrottled( - table: ThrottledTableConfiguration<Element> - ): void { - const { - tableThrottling: {backoffWaiter, unprocessed} - } = table; - if (unprocessed.length > 0) { - this.toSend.push(...unprocessed.map( - attr => [table.name, attr] as [string, Element] - )); - } - - this.throttled.delete(backoffWaiter); - delete table.tableThrottling; - } - - private async getNext(): Promise<IteratorResult<[string, Element]>> { - if ( - this.sourceDone && - this.pending.length === 0 && - this.toSend.length === 0 && - this.throttled.size === 0 - ) { - return {done: true} as IteratorResult<[string, Element]>; - } - - if (this.pending.length > 0) { - return { - done: false, - value: this.pending.shift() as [string, Element] - }; - } - - await this.refillPending(); - return this.getNext(); - } - - private async refillPending() { - while ( - !this.sourceDone && - this.toSend.length < this.batchSize - ) { - const toProcess = isIteratorResult(this.sourceNext) - ? this.sourceNext - : await Promise.race([ - this.sourceNext, - Promise.race(this.throttled) - ]); - - if (isIteratorResult(toProcess)) { - this.sourceDone = toProcess.done; - if (!this.sourceDone) { - this.addToSendQueue(toProcess.value); - this.sourceNext = this.iterator.next(); - } - } else { - this.enqueueThrottled(toProcess); - } - } - - while (this.toSend.length < this.batchSize && this.throttled.size > 0) { - this.enqueueThrottled(await Promise.race(this.throttled)); - } - - if (this.toSend.length > 0) { - await this.doBatchRequest(); - } - } -} - -function exponentialBackoff(attempts: number) { - return Math.floor(Math.random() * Math.pow(2, attempts)); -} - -function isIterable<T>(arg: any): arg is Iterable<T> { - return Boolean(arg) && typeof arg[Symbol.iterator] === 'function'; -} - -function isIteratorResult<T>(arg: any): arg is IteratorResult<T> { - return Boolean(arg) && typeof arg.done === 'boolean'; -} diff --git a/packages/dynamodb-batch-iterator/src/index.ts b/packages/dynamodb-batch-iterator/src/index.ts deleted file mode 100644 index f067865d..00000000 --- a/packages/dynamodb-batch-iterator/src/index.ts +++ /dev/null @@ -1,4 +0,0 @@ -export * from './BatchGet'; -export * from './BatchGetOptions'; -export * from './BatchWrite'; -export * from './types'; diff --git a/packages/dynamodb-batch-iterator/src/types.ts b/packages/dynamodb-batch-iterator/src/types.ts deleted file mode 100644 index 041486f5..00000000 --- a/packages/dynamodb-batch-iterator/src/types.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { - AttributeMap, - ConsistentRead, - DeleteRequest, - ExpressionAttributeNameMap, - ProjectionExpression, - PutRequest, - WriteRequest as DynamoDbWriteRequest -} from "aws-sdk/clients/dynamodb"; - -/** - * A synchronous or asynchronous iterable. 
- */ -export type SyncOrAsyncIterable<T> = Iterable<T>|AsyncIterable<T>; - -/** - * @internal - */ -export interface BatchState<Element extends TableStateElement> { - [tableName: string]: TableState<Element>; -} - -/** - * @internal - */ -export interface TableState<Element extends TableStateElement> { - attributeNames?: ExpressionAttributeNameMap; - backoffFactor: number; - consistentRead?: ConsistentRead; - name: string; - projection?: ProjectionExpression; - tableThrottling?: TableThrottlingTracker<Element>; -} - -/** - * @internal - */ -export type TableStateElement = AttributeMap|WriteRequest; - -/** - * @internal - */ -export interface TableThrottlingTracker<Element extends TableStateElement> { - backoffWaiter: Promise<ThrottledTableConfiguration<Element>>; - unprocessed: Array<Element>; -} - -/** - * @internal - */ -export interface ThrottledTableConfiguration< - Element extends TableStateElement -> extends TableState<Element> { - tableThrottling: TableThrottlingTracker<Element>; -} - -/** - * A write request for which exactly one of the `PutRequest` and `DeleteRequest` - * properties has been defined. - */ -export type WriteRequest = - DynamoDbWriteRequest & { PutRequest: PutRequest, DeleteRequest?: undefined } | - DynamoDbWriteRequest & { DeleteRequest: DeleteRequest, PutRequest?: undefined }; diff --git a/packages/dynamodb-batch-iterator/tsconfig.json b/packages/dynamodb-batch-iterator/tsconfig.json deleted file mode 100644 index 5cfd45fa..00000000 --- a/packages/dynamodb-batch-iterator/tsconfig.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "compilerOptions": { - "target": "es5", - "lib": [ - "es5", - "es2015.iterable", - "es2015.promise", - "es2015.collection", - "es2015.symbol.wellknown", - "esnext.asynciterable" - ], - "downlevelIteration": true, - "importHelpers": true, - "module": "commonjs", - "noUnusedLocals": true, - "strict": true, - "declaration": true, - "sourceMap": true, - "rootDir": "./src", - "outDir": "./build" - }, - "typedocOptions": { - "mode": "file", - "out": "../../docs/packages/dynamodb-batch-iterator", - "excludeNotExported": true, - "excludePrivate": true, - "hideGenerator": true - } -} diff --git a/packages/dynamodb-data-mapper-annotations/.npmignore b/packages/dynamodb-data-mapper-annotations/.npmignore deleted file mode 100644 index 1d116ecc..00000000 --- a/packages/dynamodb-data-mapper-annotations/.npmignore +++ /dev/null @@ -1,10 +0,0 @@ -/src -/node_modules -/coverage - -*.spec.d.ts -*.spec.js -*.spec.js.map - -tsconfig.json -tsconfig.test.json diff --git a/packages/dynamodb-data-mapper-annotations/LICENSE b/packages/dynamodb-data-mapper-annotations/LICENSE deleted file mode 100644 index da05f5c9..00000000 --- a/packages/dynamodb-data-mapper-annotations/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2017 Amazon.com, Inc. or its affiliates - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/packages/dynamodb-data-mapper-annotations/README.md b/packages/dynamodb-data-mapper-annotations/README.md deleted file mode 100644 index 10a05ccc..00000000 --- a/packages/dynamodb-data-mapper-annotations/README.md +++ /dev/null @@ -1,176 +0,0 @@ -# Amazon DynamoDB DataMapper Annotations - -[![Apache 2 License](https://img.shields.io/github/license/awslabs/dynamodb-data-mapper-js.svg?style=flat)](http://aws.amazon.com/apache-2-0/) - -This library provides annotations to allow easy integration of domain classes -with the `DataMapper` defined in `@aws/dynamodb-data-mapper`. These annotations -are provided in a separate NPM package because they rely on two experimental -features of TypeScript (decorators and metadata decoration) and depend on the -`reflect-metadata` package. - -## Getting started - -To use the annotations, you will first need to enable two TypeScript compiler -settings: `experimentalDecorators` and `emitDecoratorMetadata`. The former -enables the use of annotations in your project, and the latter emits type -information about declared property classes. - -Next, start adding `table` annotations to the domain models that represent -records in your DynamoDB tables and attribute annotations to the declared -properties that map to attributes of those records: - -```typescript -import { - attribute, - autoGeneratedHashKey, - rangeKey, - table, - versionAttribute, -} from '@aws/dynamodb-data-mapper-annotations'; -import uuidV4 = require('uuid/v4'); - -@table('my_table') -class MyDomainClass { - @autoGeneratedHashKey() - id: string; - - @rangeKey({defaultProvider: () => new Date()}) - createdAt: Date; - - @versionAttribute() - version: number; - - @attribute() - toggle?: boolean; - - @attribute({memberType: 'String'}) - tags?: Set; - - // This property will not be saved to DynamoDB. - notPersistedToDynamoDb: string; -} -``` - -Using these annotations will automatically define properties at the -`DynamoDbSchema` and `DynamoDbTable` symbols on the class prototype, meaning -that you can pass instances of the class directly to a `DataMapper` instance. No -need to define a schema separately; your class's property type declarations are -the schema. - -To declare a class corresponding to a map attribute value in a DynamoDB record, -simply omit the `table` annotation: - -```typescript -import { - attribute, - hashKey, - table, -} from '@aws/dynamodb-data-mapper-annotations'; -import {embed} from '@aws/dynamodb-data-mapper'; - -class Comment { - @attribute() - author?: string; - - @attribute() - postedAt?: Date; - - @attribute() - text?: string; -} - -@table('posts') -class BlogPost { - @hashKey() - id: string; - - @attribute() - author?: string; - - @attribute() - postedAt?: Date; - - @attribute() - text?: string; - - @attribute({memberType: embed(Comment)}) - replies?: Array -} -``` - -## Supported Annotations - -### `attribute` - -Marks a property as an attribute in the mapped DynamoDB record. The annotation -will attempt to infer the correct DynamoDB type from the TypeScript type -metadata, though no metadata will be available for any generic type parameters -used. - -`attribute` must be called as a function and accepts an optional argument -containing a partial or complete field schema definition. - -### `autoGeneratedHashKey` - -Designates the property as representing the record's hash key. If no key is -defined, a V4 UUID will be used. 
- -`autoGeneratedHashKey` must be called as a function and accepts an optional -argument containing a partial or complete field schema definition. - -### `hashKey` - -Designates the property as representing the record's hash (partition) key. - -`hashKey` must be called as a function and accepts an optional argument -containing a partial or complete field schema definition. - -### `rangeKey` - -Designates the property as representing the record's range (sort) key. - -`rangeKey` must be called as a function and accepts an optional argument -containing a partial or complete field schema definition. - -### `table` - -Designates a class as representing a table in DynamoDB. - -`table` must be called as a function with that table name as the only argument. - -### `versionAttribute` - -Designates the property as representing the record's version attribute. The data -mapper will use optimistic locking by incrementing the version attribute during -`put` and `update` operations and sending a condition expression with all `put`, -`update`, and `delete` operations that will cause the operation to fail if the -record in the database has a version number that does not match the one held by -the client. - -`versionAttribute` must be called as a function and accepts an optional argument -containing a partial or complete field schema definition. - -## Caveats - -### Reliance on experimental TypeScript features - -Because this package relies on experimental TypeScript features, it is subject -to change and should be considered beta software. Decorators are currently on -the ECMAScript standards track, but the accepted form may differ from that -implemented by TypeScript, which could cause the public interface presented by -this project to change. - -### Lack of type information in generics - -Please note that TypeScript does not emit any metadata about the type parameters -supplied to generic types, so `Array`, `[number, string]`, and -`MyClass[]` are all exposed as `Array` via the emitted metadata. Without -additional metadata, this annotation will treat all encountered arrays as -collections of untyped data. You may supply either a `members` declaration or a -`memberType` declaration to direct this annotation to treat a property as a -tuple or typed list, respectively. - -Member type declarations are required for maps and sets, though the member type -will be automatically inferred if a property is declared as being of type -`BinarySet` or `NumberValueSet` (from the `@aws/dynamodb-auto-marshaller` -package). 
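To make the caveat about `members` and `memberType` declarations concrete, here is a minimal, hypothetical sketch (the table and property names are illustrative only and not part of this package), modeled on the annotations shown earlier in this README. It shows how a `members` declaration marks a property as a tuple, while a `memberType` declaration marks it as a typed list, map, or set:

```typescript
import { attribute, hashKey, table } from '@aws/dynamodb-data-mapper-annotations';

// Illustrative table name; not part of the package.
@table('illustrative_table')
class AnnotatedRecord {
    @hashKey()
    id: string;

    // Without extra metadata, a plain Array property is treated as an
    // untyped Collection.
    @attribute()
    untyped?: Array<any>;

    // A memberType declaration turns an Array into a typed List...
    @attribute({memberType: {type: 'String'}})
    corrections?: Array<string>;

    // ...and a members declaration turns it into a Tuple.
    @attribute({members: [{type: 'Boolean'}, {type: 'String'}]})
    flagAndLabel?: [boolean, string];

    // Maps and sets always require a member type declaration.
    @attribute({memberType: {type: 'String'}})
    handles?: Map<string, string>;

    @attribute({memberType: 'String'})
    tags?: Set<string>;
}
```

As with the examples earlier in this README, such a class can be passed directly to a `DataMapper` instance; the explicit declarations are only needed where TypeScript's emitted metadata cannot describe the member types.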
diff --git a/packages/dynamodb-data-mapper-annotations/package.json b/packages/dynamodb-data-mapper-annotations/package.json deleted file mode 100644 index d91aacd1..00000000 --- a/packages/dynamodb-data-mapper-annotations/package.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "name": "@aws/dynamodb-data-mapper-annotations", - "version": "0.7.3", - "description": "Annotations providing easy integration between TypeScript domain objects and the @aws/dynamodb-data-mapper library", - "keywords": [ - "aws", - "dynamodb" - ], - "repository": { - "type": "git", - "url": "git+https://github.com/awslabs/dynamodb-data-mapper-js.git" - }, - "bugs": { - "url": "https://github.com/awslabs/dynamodb-data-mapper-js/issues" - }, - "homepage": "https://awslabs.github.io/dynamodb-data-mapper-js/packages/dynamodb-data-mapper-annotations/", - "main": "./build/index.js", - "types": "./build/index.d.ts", - "scripts": { - "build": "tsc", - "docs": "typedoc src", - "integ": "npm run pretest && jest --config=jest.integration.js", - "prepublishOnly": "npm run build", - "pretest": "tsc -p tsconfig.test.json", - "test": "jest \"build/(.+).spec.js\"" - }, - "author": { - "name": "AWS SDK for JavaScript Team", - "email": "aws-sdk-js@amazon.com" - }, - "license": "Apache-2.0", - "devDependencies": { - "@types/jest": "^24", - "@types/node": "^8.0.4", - "@types/uuid": "^3.0.0", - "aws-sdk": "^2.7.0", - "jest": "^24", - "typedoc": "^0.14.0", - "typescript": "^3.4" - }, - "dependencies": { - "@aws/dynamodb-auto-marshaller": "^0.7.1", - "@aws/dynamodb-data-mapper": "^0.7.3", - "@aws/dynamodb-data-marshaller": "^0.7.3", - "reflect-metadata": "^0.1.10", - "tslib": "^1.9", - "uuid": "^3.0.0" - } -} diff --git a/packages/dynamodb-data-mapper-annotations/src/annotationShapes.ts b/packages/dynamodb-data-mapper-annotations/src/annotationShapes.ts deleted file mode 100644 index 233502e4..00000000 --- a/packages/dynamodb-data-mapper-annotations/src/annotationShapes.ts +++ /dev/null @@ -1,9 +0,0 @@ -import {ZeroArgumentsConstructor} from '@aws/dynamodb-data-marshaller'; - -export interface ClassAnnotation { - (target: ZeroArgumentsConstructor): void; -} - -export interface PropertyAnnotation { - (target: Object, propertyKey: string|symbol): void; -} \ No newline at end of file diff --git a/packages/dynamodb-data-mapper-annotations/src/attribute.spec.ts b/packages/dynamodb-data-mapper-annotations/src/attribute.spec.ts deleted file mode 100644 index 7cc6d5fa..00000000 --- a/packages/dynamodb-data-mapper-annotations/src/attribute.spec.ts +++ /dev/null @@ -1,464 +0,0 @@ -import {attribute} from './attribute'; -import {METADATA_TYPE_KEY} from './constants'; -import {BinarySet, NumberValueSet} from "@aws/dynamodb-auto-marshaller"; -import {DynamoDbSchema} from '@aws/dynamodb-data-mapper'; -import {isSchema, SchemaType} from '@aws/dynamodb-data-marshaller'; - -describe('attribute', () => { - it( - 'should create a document schema compatible with the DynamoDbSchema protocol', - () => { - const decorator = attribute(); - const target = Object.create(null); - decorator(target, 'property'); - - expect(isSchema(target[DynamoDbSchema])).toBe(true); - } - ); - - it( - 'should bind the provided field schema to the document schema bound to the target object', - () => { - const expected: SchemaType = { - type: 'Number', - versionAttribute: true - }; - const decorator = attribute(expected); - const target = Object.create(null); - decorator(target, 'property1'); - decorator(target, 'property2'); - - 
expect(target[DynamoDbSchema]).toEqual({ - property1: expected, - property2: expected, - }); - } - ); - - it( - 'should throw an error if a keyType is set on a schema node that is not a valid key', - () => { - const expected: any = { - type: 'Boolean', - keyType: 'HASH' - }; - const decorator = attribute(expected); - expect(() => decorator(Object.create(null), 'property')).toThrow(); - } - ); - - it( - 'should throw an error if index key configurations are set on a schema node that is not a valid key', - () => { - const expected: any = { - type: 'Boolean', - indexKeyConfigurations: { - indexName: 'HASH' - } - }; - const decorator = attribute(expected); - expect(() => decorator(Object.create(null), 'property')).toThrow(); - } - ); - - it('should support branching inheritance', () => { - abstract class Foo { - @attribute() - prop?: string; - } - - class Bar extends Foo { - @attribute() - otherProp?: number; - } - - class Baz extends Foo { - @attribute() - yetAnotherProp?: boolean; - } - - const bar = new Bar(); - expect((bar as any)[DynamoDbSchema]).toEqual({ - prop: {type: 'String'}, - otherProp: {type: 'Number'}, - }); - - const baz = new Baz(); - expect((baz as any)[DynamoDbSchema]).toEqual({ - prop: {type: 'String'}, - yetAnotherProp: {type: 'Boolean'}, - }); - }); - - it('should support multiple inheritance levels', () => { - class Foo { - @attribute() - prop?: string; - } - - class Bar extends Foo { - @attribute() - otherProp?: number; - } - - class Baz extends Bar { - @attribute() - yetAnotherProp?: boolean; - } - - const foo = new Foo(); - expect((foo as any)[DynamoDbSchema]).toEqual({ - prop: {type: 'String'}, - }); - const bar = new Bar(); - expect((bar as any)[DynamoDbSchema]).toEqual({ - prop: {type: 'String'}, - otherProp: {type: 'Number'}, - }); - - const baz = new Baz(); - expect((baz as any)[DynamoDbSchema]).toEqual({ - prop: {type: 'String'}, - otherProp: {type: 'Number'}, - yetAnotherProp: {type: 'Boolean'}, - }); - }); - - describe('TypeScript decorator metadata integration', () => { - const originalGetMetadata = Reflect.getMetadata; - - beforeEach(() => { - Reflect.getMetadata = jest.fn(); - }); - - afterEach(() => { - Reflect.metadata = originalGetMetadata; - }); - - it( - `should read the ${METADATA_TYPE_KEY} metadata key used by TypeScript's decorator metadata integration`, - () => { - const decorator = attribute(); - const target = Object.create(null); - decorator(target, 'property'); - - const reflectionCalls = (Reflect.getMetadata as any).mock.calls; - expect(reflectionCalls.length).toBe(1); - expect(reflectionCalls[0][0]).toBe(METADATA_TYPE_KEY); - expect(reflectionCalls[0][1]).toBe(target); - expect(reflectionCalls[0][2]).toBe('property'); - } - ); - - it( - `should recognize values with a constructor of String as a string`, - () => { - (Reflect.getMetadata as any).mockImplementation(() => String); - - const decorator = attribute(); - const target = Object.create(null); - decorator(target, 'property'); - - expect(target[DynamoDbSchema].property) - .toEqual({type: 'String'}); - } - ); - - it( - `should recognize values with a constructor of Number as a number`, - () => { - (Reflect.getMetadata as any).mockImplementation(() => Number); - - const decorator = attribute(); - const target = Object.create(null); - decorator(target, 'property'); - - expect(target[DynamoDbSchema].property) - .toEqual({type: 'Number'}); - } - ); - - it( - `should recognize values with a constructor of Boolean as a boolean`, - () => { - (Reflect.getMetadata as any).mockImplementation(() => 
Boolean); - - const decorator = attribute(); - const target = Object.create(null); - decorator(target, 'property'); - - expect(target[DynamoDbSchema].property) - .toEqual({type: 'Boolean'}); - } - ); - - it( - `should recognize values with a constructor of Date as a date`, - () => { - (Reflect.getMetadata as any).mockImplementation(() => Date); - - const decorator = attribute(); - const target = Object.create(null); - decorator(target, 'property'); - - expect(target[DynamoDbSchema].property) - .toEqual({type: 'Date'}); - } - ); - - it( - `should recognize values with a constructor that subclasses Date as a date`, - () => { - class MyDate extends Date {} - (Reflect.getMetadata as any).mockImplementation(() => MyDate); - - const decorator = attribute(); - const target = Object.create(null); - decorator(target, 'property'); - - expect(target[DynamoDbSchema].property) - .toEqual({type: 'Date'}); - } - ); - - it( - `should recognize values with a constructor of BinarySet as a set of binary values`, - () => { - (Reflect.getMetadata as any).mockImplementation(() => BinarySet); - - const decorator = attribute(); - const target = Object.create(null); - decorator(target, 'property'); - - expect(target[DynamoDbSchema].property) - .toEqual({type: 'Set', memberType: 'Binary'}); - } - ); - - it( - `should recognize values with a constructor that subclasses BinarySet as a set of binary values`, - () => { - class MyBinarySet extends BinarySet {} - (Reflect.getMetadata as any).mockImplementation(() => MyBinarySet); - - const decorator = attribute(); - const target = Object.create(null); - decorator(target, 'property'); - - expect(target[DynamoDbSchema].property) - .toEqual({type: 'Set', memberType: 'Binary'}); - } - ); - - it( - `should recognize values with a constructor of NumberValueSet as a set of number values`, - () => { - (Reflect.getMetadata as any).mockImplementation(() => NumberValueSet); - - const decorator = attribute(); - const target = Object.create(null); - decorator(target, 'property'); - - expect(target[DynamoDbSchema].property) - .toEqual({type: 'Set', memberType: 'Number'}); - } - ); - - it( - `should recognize values with a constructor that subclasses NumberValueSet as a set of number values`, - () => { - class MyNumberValueSet extends NumberValueSet {} - (Reflect.getMetadata as any).mockImplementation(() => MyNumberValueSet); - - const decorator = attribute(); - const target = Object.create(null); - decorator(target, 'property'); - - expect(target[DynamoDbSchema].property) - .toEqual({type: 'Set', memberType: 'Number'}); - } - ); - - it(`should recognize values with a constructor of Set as a set`, () => { - (Reflect.getMetadata as any).mockImplementation(() => Set); - - const decorator = attribute({memberType: 'String'}); - const target = Object.create(null); - decorator(target, 'property'); - - expect(target[DynamoDbSchema].property) - .toEqual({type: 'Set', memberType: 'String'}); - }); - - it( - `should recognize values with a constructor that subclasses Set as a set`, - () => { - class MySet extends Set {} - (Reflect.getMetadata as any).mockImplementation(() => MySet); - - const decorator = attribute({memberType: 'Number'}); - const target = Object.create(null); - decorator(target, 'property'); - - expect(target[DynamoDbSchema].property) - .toEqual({type: 'Set', memberType: 'Number'}); - } - ); - - it( - `should throw on values with a constructor of Set that lack a memberType declaration`, - () => { - (Reflect.getMetadata as any).mockImplementation(() => Set); - - const decorator = 
attribute(); - expect(() => decorator({}, 'property')) - .toThrowError(/memberType/); - } - ); - - it(`should recognize values with a constructor of Map as a map`, () => { - (Reflect.getMetadata as any).mockImplementation(() => Map); - const memberType: SchemaType = { - type: 'Document', - members: {}, - }; - - const decorator = attribute({memberType}); - const target = Object.create(null); - decorator(target, 'property'); - - expect(target[DynamoDbSchema].property) - .toEqual({type: 'Map', memberType}); - }); - - it( - `should recognize values with a constructor that subclasses Map as a map`, - () => { - class MyMap extends Map {} - (Reflect.getMetadata as any).mockImplementation(() => MyMap); - const memberType: SchemaType = { - type: 'Tuple', - members: [ - {type: 'Boolean'}, - {type: 'String'}, - ] - }; - - const decorator = attribute({memberType}); - const target = Object.create(null); - decorator(target, 'property'); - - expect(target[DynamoDbSchema].property) - .toEqual({type: 'Map', memberType}); - } - ); - - it( - `should throw on values with a constructor of Map that lack a memberType declaration`, - () => { - (Reflect.getMetadata as any).mockImplementation(() => Map); - - const decorator = attribute(); - expect(() => decorator({}, 'property')) - .toThrowError(/memberType/); - } - ); - - it( - 'should treat an object that adheres to the DynamoDbSchema protocol as a document', - () => { - class Document { - get [DynamoDbSchema]() { - return {}; - } - } - - (Reflect.getMetadata as any).mockImplementation(() => Document); - const decorator = attribute(); - const target = Object.create(null); - decorator(target, 'property'); - - expect(target[DynamoDbSchema].property).toEqual({ - type: 'Document', - members: {}, - valueConstructor: Document, - }); - } - ); - - it('should treat arrays as collection types', () => { - (Reflect.getMetadata as any).mockImplementation(() => Array); - const decorator = attribute(); - const target = Object.create(null); - decorator(target, 'property'); - - expect(target[DynamoDbSchema].property).toEqual({ - type: 'Collection', - }); - }); - - it( - 'should treat arrays with a declared memberType as list types', - () => { - (Reflect.getMetadata as any).mockImplementation(() => Array); - - const memberType: SchemaType = {type: 'String'}; - const decorator = attribute({memberType}); - const target = Object.create(null); - decorator(target, 'property'); - - expect(target[DynamoDbSchema].property).toEqual({ - type: 'List', - memberType, - }); - } - ); - - it( - 'should treat arrays with members as tuple types', - () => { - (Reflect.getMetadata as any).mockImplementation(() => Array); - - const members: Array<SchemaType> = [ - {type: 'Boolean'}, - {type: 'String'}, - ]; - const decorator = attribute({members}); - const target = Object.create(null); - decorator(target, 'property'); - - expect(target[DynamoDbSchema].property).toEqual({ - type: 'Tuple', - members, - }); - } - ); - - it( - 'should treat constructors that descend from Array as collection types', - () => { - class MyArray extends Array {} - (Reflect.getMetadata as any).mockImplementation(() => MyArray); - const decorator = attribute(); - const target = Object.create(null); - decorator(target, 'property'); - - expect(target[DynamoDbSchema].property).toEqual({ - type: 'Collection', - }); - } - ); - - it( - 'should treat values with an unrecognized constructor as an "Any" type', - () => { - (Reflect.getMetadata as any).mockImplementation(() => Object); - const decorator = attribute(); - const target = Object.create(null); - 
decorator(target, 'property'); - - expect(target[DynamoDbSchema].property).toEqual({type: 'Any'}); - } - ); - }); -}); diff --git a/packages/dynamodb-data-mapper-annotations/src/attribute.ts b/packages/dynamodb-data-mapper-annotations/src/attribute.ts deleted file mode 100644 index cdc640a5..00000000 --- a/packages/dynamodb-data-mapper-annotations/src/attribute.ts +++ /dev/null @@ -1,209 +0,0 @@ -import 'reflect-metadata'; -import { PropertyAnnotation } from './annotationShapes'; -import { METADATA_TYPE_KEY } from './constants'; -import { BinarySet, NumberValueSet } from "@aws/dynamodb-auto-marshaller"; -import { DynamoDbSchema } from '@aws/dynamodb-data-mapper'; -import { - DocumentType, - KeyableType, - Schema, - SchemaType, - SetType -} from "@aws/dynamodb-data-marshaller"; - -/** - * Declare a property in a TypeScript class to be part of a DynamoDB schema. - * Meant to be used as a property decorator in conjunction with TypeScript's - * emitted type metadata. If used with in a project compiled with the - * `emitDecoratorMetadata` option enabled, the type will infer most types from - * the TypeScript source. - * - * Please note that TypeScript does not emit any metadata about the type - * parameters supplied to generic types, so `Array`, `[number, string]`, - * and `MyClass[]` are all exposed as `Array` via the emitted metadata. Without - * additional metadata, this annotation will treat all encountered arrays as - * collections of untyped data. You may supply either a `members` declaration or - * a `memberType` declaration to direct this annotation to treat a property as a - * tuple or typed list, respectively. - * - * Member type declarations are required for maps and sets. - * - * @see https://www.typescriptlang.org/docs/handbook/decorators.html - * @see https://www.typescriptlang.org/docs/handbook/compiler-options.html - * @see https://github.com/Microsoft/TypeScript/issues/2577 - * - * @example - * export class MyClass { - * @attribute() - * id: string; - * - * @attribute() - * subdocument?: MyOtherClass; - * - * @attribute() - * untypedCollection?: Array; - * - * @attribute({memberType: {type: 'String'}}) - * listOfStrings?: Array; - * - * @attribute({members: [{type: 'Boolean', type: 'String'}]}) - * tuple?: [boolean, string]; - * - * @attribute({memberType: {type: 'String'}}) - * mapStringString?: Map; - * - * @attribute() - * binary?: Uint8Array; - * } - */ -export function attribute( - parameters: Partial = {} -): PropertyAnnotation { - return (target, propertyKey) => { - if (!Object.prototype.hasOwnProperty.call(target, DynamoDbSchema)) { - Object.defineProperty( - target, - DynamoDbSchema as any, // TypeScript complains about the use of symbols here, though it should be allowed - {value: deriveBaseSchema(target)} - ); - } - - const schemaType = metadataToSchemaType( - Reflect.getMetadata(METADATA_TYPE_KEY, target, propertyKey), - parameters - ); - - if ( - ( - (schemaType as KeyableType).keyType || - (schemaType as KeyableType).indexKeyConfigurations - ) && - [ - 'Binary', - 'Custom', - 'Date', - 'Number', - 'String', - ].indexOf(schemaType.type) < 0 - ) { - throw new Error( - `Properties of type ${schemaType.type} may not be used as index or table keys. If you are relying on automatic type detection and have encountered this error, please ensure that the 'emitDecoratorMetadata' TypeScript compiler option is enabled. 
Please see https://www.typescriptlang.org/docs/handbook/decorators.html#metadata for more information on this compiler option.` - ); - } - - (target as any)[DynamoDbSchema][propertyKey] = schemaType; - }; -} - -function deriveBaseSchema(target: any): Schema { - if (target && typeof target === 'object') { - const prototype = Object.getPrototypeOf(target); - if (prototype) { - return { - ...deriveBaseSchema(prototype), - ...Object.prototype.hasOwnProperty.call(prototype, DynamoDbSchema) - ? prototype[DynamoDbSchema] - : {} - }; - } - } - - return {}; -} - -function metadataToSchemaType( - ctor: {new (): any}|undefined, - declaration: Partial -): SchemaType { - let {type, ...rest} = declaration; - if (type === undefined) { - if (ctor) { - if (ctor === String) { - type = 'String'; - } else if (ctor === Number) { - type = 'Number'; - } else if (ctor === Boolean) { - type = 'Boolean'; - } else if (ctor === Date || ctor.prototype instanceof Date) { - type = 'Date'; - } else if ( - ctor === BinarySet || - ctor.prototype instanceof BinarySet - ) { - type = 'Set'; - (rest as SetType).memberType = 'Binary'; - } else if ( - ctor === NumberValueSet || - ctor.prototype instanceof NumberValueSet - ) { - type = 'Set'; - (rest as SetType).memberType = 'Number'; - } else if (ctor === Set || ctor.prototype instanceof Set) { - type = 'Set'; - if (!('memberType' in rest)) { - throw new Error( - 'Invalid set declaration. You must specify a memberType' - ); - } - } else if (ctor === Map || ctor.prototype instanceof Map) { - type = 'Map'; - if (!('memberType' in rest)) { - throw new Error( - 'Invalid map declaration. You must specify a memberType' - ); - } - } else if (ctor.prototype[DynamoDbSchema]) { - type = 'Document'; - (rest as DocumentType).members = ctor.prototype[DynamoDbSchema]; - (rest as DocumentType).valueConstructor = ctor; - } else if (isBinaryType(ctor)) { - type = 'Binary'; - } else if (ctor === Array || ctor.prototype instanceof Array) { - if ('members' in declaration) { - type = 'Tuple'; - } else if ('memberType' in declaration) { - type = 'List'; - } else { - type = 'Collection'; - } - } else { - type = 'Any'; - } - } else { - type = 'Any'; - } - } - - return { - ...rest, - type - } as SchemaType; -} - -/** - * ArrayBuffer.isView will only evaluate if an object instance is an - * ArrayBufferView, but TypeScript metadata gives us a reference to the class. - * - * This function checks if the provided constructor is or extends the built-in - * `ArrayBuffer` constructor, the `DataView` constructor, or any `TypedArray` - * constructor. - * - * This function will need to be modified if new binary types are added to - * JavaScript (e.g., the `Int64Array` or `Uint64Array` discussed in - * {@link https://github.com/tc39/proposal-bigint the BigInt TC39 proposal}. 
- * - * @see https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView - */ -function isBinaryType(arg: any): boolean { - return arg === Uint8Array || arg.prototype instanceof Uint8Array || - arg === Uint8ClampedArray || arg.prototype instanceof Uint8ClampedArray || - arg === Uint16Array || arg.prototype instanceof Uint16Array || - arg === Uint32Array || arg.prototype instanceof Uint32Array || - arg === Int8Array || arg.prototype instanceof Int8Array || - arg === Int16Array || arg.prototype instanceof Int16Array || - arg === Int32Array || arg.prototype instanceof Int32Array || - arg === Float32Array || arg.prototype instanceof Float32Array || - arg === Float64Array || arg.prototype instanceof Float64Array || - arg === ArrayBuffer || arg.prototype instanceof ArrayBuffer || - arg === DataView || arg.prototype instanceof DataView; -} diff --git a/packages/dynamodb-data-mapper-annotations/src/autoGeneratedHashKey.spec.ts b/packages/dynamodb-data-mapper-annotations/src/autoGeneratedHashKey.spec.ts deleted file mode 100644 index 75f91b25..00000000 --- a/packages/dynamodb-data-mapper-annotations/src/autoGeneratedHashKey.spec.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { autoGeneratedHashKey } from './autoGeneratedHashKey'; -import { v4 } from 'uuid'; - -jest.mock('./attribute', () => ({attribute: jest.fn()})); -import { attribute } from './attribute'; - -describe('autoGeneratedHashKey', () => { - beforeEach(() => { - (attribute as any).mockClear(); - }); - - it( - 'should call attribute with a defined type, keyType, and defaultProvider', - () => { - autoGeneratedHashKey(); - - expect((attribute as any).mock.calls.length).toBe(1); - expect((attribute as any).mock.calls[0]).toEqual([ - { - type: 'String', - keyType: 'HASH', - defaultProvider: v4, - } - ]); - } - ); - - it('should pass through any supplied parameters', () => { - const attributeName = 'foo' - autoGeneratedHashKey({attributeName}); - - expect((attribute as any).mock.calls[0][0]) - .toMatchObject({attributeName}); - }); -}); diff --git a/packages/dynamodb-data-mapper-annotations/src/autoGeneratedHashKey.ts b/packages/dynamodb-data-mapper-annotations/src/autoGeneratedHashKey.ts deleted file mode 100644 index 4a65558a..00000000 --- a/packages/dynamodb-data-mapper-annotations/src/autoGeneratedHashKey.ts +++ /dev/null @@ -1,14 +0,0 @@ -import {PropertyAnnotation} from './annotationShapes'; -import {hashKey} from './hashKey'; -import {StringType} from '@aws/dynamodb-data-marshaller'; -import {v4} from 'uuid'; - -export function autoGeneratedHashKey( - parameters: Partial = {} -): PropertyAnnotation { - return hashKey({ - ...parameters, - type: 'String', - defaultProvider: v4, - }); -} \ No newline at end of file diff --git a/packages/dynamodb-data-mapper-annotations/src/constants.ts b/packages/dynamodb-data-mapper-annotations/src/constants.ts deleted file mode 100644 index 7849fa61..00000000 --- a/packages/dynamodb-data-mapper-annotations/src/constants.ts +++ /dev/null @@ -1 +0,0 @@ -export const METADATA_TYPE_KEY = 'design:type'; diff --git a/packages/dynamodb-data-mapper-annotations/src/exampleSchema.fixture.ts b/packages/dynamodb-data-mapper-annotations/src/exampleSchema.fixture.ts deleted file mode 100644 index c39ad27d..00000000 --- a/packages/dynamodb-data-mapper-annotations/src/exampleSchema.fixture.ts +++ /dev/null @@ -1,108 +0,0 @@ -import { - attribute, - autoGeneratedHashKey, - rangeKey, - table, - versionAttribute, -} from "./"; -import { embed } from '@aws/dynamodb-data-mapper'; - -export class Author { - @attribute() - 
name?: string; - - @attribute({memberType: {type: 'String'}}) - socialMediaHandles?: Map; - - @attribute() - photo?: Uint8Array; -} - -export class Comment { - /** - * The time at which this comment was posted - */ - @attribute() - timestamp?: Date; - - /** - * Whether this comment has been approved by a moderator. - */ - @attribute() - approved?: boolean; - - /** - * The title of the comment - */ - @attribute() - subject?: string; - - /** - * The text of the comment - */ - @attribute() - text?: string; - - /** - * The handle of the comment author - */ - @attribute() - author?: string; - - /** - * The number of upvotes this comment has received. - */ - @attribute() - upvotes?: number; - - /** - * The number of downvotes this comment has received. - */ - @attribute() - downvotes?: number; - - /** - * Replies to this comment - */ - @attribute({ memberType: embed(Comment) }) - replies?: Array; -} - -@table('Posts') -export class Post { - @autoGeneratedHashKey() - id?: string; - - @rangeKey() - createdAt?: Date; - - @versionAttribute() - version?: number; - - @attribute() - author?: Author; - - @attribute() - content?: string; - - @attribute() - title?: string; - - @attribute() - subtitle?: string; - - @attribute() - imageLink?: string; - - @attribute({ memberType: { type: 'String' }}) - corrections?: Array; - - /** - * Replies to this post - */ - @attribute({ memberType: embed(Comment) }) - replies?: Array; - - @attribute({ memberType: 'String' }) - tags?: Set; -} diff --git a/packages/dynamodb-data-mapper-annotations/src/functional.spec.ts b/packages/dynamodb-data-mapper-annotations/src/functional.spec.ts deleted file mode 100644 index 01a15b3a..00000000 --- a/packages/dynamodb-data-mapper-annotations/src/functional.spec.ts +++ /dev/null @@ -1,115 +0,0 @@ -import { Author, Comment, Post } from './exampleSchema.fixture'; -import { DataMapper, DynamoDbSchema } from '@aws/dynamodb-data-mapper'; -import { isSchema } from '@aws/dynamodb-data-marshaller'; - -jest.mock('uuid', () => ({v4: jest.fn(() => 'uuid')})); - -describe('annotations', () => { - it('should create a schema that includes references to property schemas', () => { - const postSchema = (Post.prototype as any)[DynamoDbSchema]; - expect(isSchema(postSchema)).toBe(true); - expect(isSchema(postSchema.author.members)).toBe(true); - expect(isSchema(postSchema.replies.memberType.members)).toBe(true); - }); - - it('should support recursive shapes in the generated schema', () => { - const commentSchema = (Comment.prototype as any)[DynamoDbSchema]; - expect(isSchema(commentSchema)).toBe(true); - expect(isSchema(commentSchema.replies.memberType.members)).toBe(true); - expect(commentSchema.replies.memberType.members).toBe(commentSchema); - }); - - it('should marshall a full object graph according to the schema', async () => { - const promiseFunc = jest.fn(() => Promise.resolve({Item: {}})); - const mockDynamoDbClient = { - config: {}, - putItem: jest.fn(() => ({promise: promiseFunc})), - }; - - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - }); - - const post = new Post(); - post.createdAt = new Date(0); - post.author = new Author(); - post.author.name = 'John Smith'; - post.author.photo = Uint8Array.from([0xde, 0xad, 0xbe, 0xef]); - post.author.socialMediaHandles = new Map([ - ['github', 'john_smith_27834231'], - ['twitter', 'theRealJohnSmith'], - ]); - post.title = 'Review of Rob Loblaw\'s Law Blog'; - post.subtitle = 'Does it live up to the hype?'; - post.content = "It's a great law blog."; - post.corrections = [ - 
'The first edition of this post did not adequately attest to the law blog\'s greatness.' - ]; - post.replies = [new Comment()]; - - post.replies[0].author = 'Rob Loblaw'; - post.replies[0].timestamp = new Date(0); - post.replies[0].subject = 'Great review'; - post.replies[0].text = 'Appreciate the congrats'; - post.replies[0].upvotes = 35; - post.replies[0].downvotes = 0; - post.replies[0].approved = true; - - const reply = new Comment(); - reply.author = 'John Smith'; - reply.timestamp = new Date(60000); - reply.subject = 'Great review of my review'; - reply.text = 'Thanks for reading!'; - reply.approved = true; - - post.replies[0].replies = [reply]; - - await mapper.put(post); - - expect((mockDynamoDbClient.putItem.mock.calls[0] as any)[0]) - .toMatchObject({ - ConditionExpression: 'attribute_not_exists(#attr0)', - ExpressionAttributeNames: {'#attr0': 'version'}, - TableName: 'Posts', - Item: { - author: {M: { - name: {S: "John Smith"}, - photo: {B: Uint8Array.from([0xde, 0xad, 0xbe, 0xef])}, - socialMediaHandles: {M: { - github: {S: "john_smith_27834231"}, - twitter: {S: "theRealJohnSmith"} - }} - }}, - content: {S: "It's a great law blog."}, - corrections: {L: [ - {S: "The first edition of this post did not adequately attest to the law blog's greatness."} - ]}, - createdAt: {N: "0"}, - id: {S: "uuid"}, - replies: {L: [ - {M: { - approved: {BOOL: true}, - author: {S: "Rob Loblaw"}, - downvotes: {N: "0"}, - replies: {L: [ - {M: { - approved: {BOOL: true}, - author: {S: "John Smith"}, - subject: {S: "Great review of my review"}, - text: {S: "Thanks for reading!"}, - timestamp: {N: "60"} - }} - ]}, - subject: {S: "Great review"}, - text: {S: "Appreciate the congrats"}, - timestamp: {N: "0"}, - upvotes: {N: "35"} - }} - ]}, - subtitle: {S: "Does it live up to the hype?"}, - title: {S: "Review of Rob Loblaw's Law Blog"}, - version: {N: "0"} - }, - }); - }); -}); diff --git a/packages/dynamodb-data-mapper-annotations/src/hashKey.spec.ts b/packages/dynamodb-data-mapper-annotations/src/hashKey.spec.ts deleted file mode 100644 index a978c5d9..00000000 --- a/packages/dynamodb-data-mapper-annotations/src/hashKey.spec.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { hashKey } from './hashKey'; - -jest.mock('./attribute', () => ({attribute: jest.fn()})); -import {attribute} from './attribute'; - -describe('hashKey', () => { - beforeEach(() => { - (attribute as any).mockClear(); - }); - - it('should call attribute with a defined keyType', () => { - hashKey(); - - expect((attribute as any).mock.calls.length).toBe(1); - expect((attribute as any).mock.calls[0]).toEqual([ - {keyType: 'HASH'} - ]); - }); - - it('should pass through any supplied parameters', () => { - const attributeName = 'foo' - hashKey({attributeName}); - - expect((attribute as any).mock.calls[0][0]) - .toMatchObject({attributeName}); - }); -}); diff --git a/packages/dynamodb-data-mapper-annotations/src/hashKey.ts b/packages/dynamodb-data-mapper-annotations/src/hashKey.ts deleted file mode 100644 index e35f6bc1..00000000 --- a/packages/dynamodb-data-mapper-annotations/src/hashKey.ts +++ /dev/null @@ -1,18 +0,0 @@ -import {PropertyAnnotation} from './annotationShapes'; -import {attribute} from './attribute'; -import { - BinaryType, - CustomType, - DateType, - NumberType, - StringType, -} from '@aws/dynamodb-data-marshaller'; - -export function hashKey( - parameters: Partial|DateType|NumberType|StringType> = {} -): PropertyAnnotation { - return attribute({ - ...parameters, - keyType: 'HASH', - }); -} \ No newline at end of file diff --git 
a/packages/dynamodb-data-mapper-annotations/src/index.ts b/packages/dynamodb-data-mapper-annotations/src/index.ts deleted file mode 100644 index 05111709..00000000 --- a/packages/dynamodb-data-mapper-annotations/src/index.ts +++ /dev/null @@ -1,6 +0,0 @@ -export * from './attribute'; -export * from './autoGeneratedHashKey'; -export * from './hashKey'; -export * from './rangeKey'; -export * from './table'; -export * from './versionAttribute'; diff --git a/packages/dynamodb-data-mapper-annotations/src/rangeKey.spec.ts b/packages/dynamodb-data-mapper-annotations/src/rangeKey.spec.ts deleted file mode 100644 index 4843ecbb..00000000 --- a/packages/dynamodb-data-mapper-annotations/src/rangeKey.spec.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { rangeKey } from './rangeKey'; - -jest.mock('./attribute', () => ({attribute: jest.fn()})); -import { attribute } from './attribute'; - -describe('rangeKey', () => { - beforeEach(() => { - (attribute as any).mockClear(); - }); - - it('should call attribute with a defined keyType', () => { - rangeKey(); - - expect((attribute as any).mock.calls.length).toBe(1); - expect((attribute as any).mock.calls[0]).toEqual([ - {keyType: 'RANGE'} - ]); - }); - - it('should pass through any supplied parameters', () => { - const attributeName = 'foo' - rangeKey({attributeName}); - - expect((attribute as any).mock.calls[0][0]) - .toMatchObject({attributeName}); - }); -}); diff --git a/packages/dynamodb-data-mapper-annotations/src/rangeKey.ts b/packages/dynamodb-data-mapper-annotations/src/rangeKey.ts deleted file mode 100644 index 38141ff9..00000000 --- a/packages/dynamodb-data-mapper-annotations/src/rangeKey.ts +++ /dev/null @@ -1,18 +0,0 @@ -import {PropertyAnnotation} from './annotationShapes'; -import {attribute} from './attribute'; -import { - BinaryType, - CustomType, - DateType, - NumberType, - StringType, -} from '@aws/dynamodb-data-marshaller'; - -export function rangeKey( - parameters: Partial|DateType|NumberType|StringType> = {} -): PropertyAnnotation { - return attribute({ - ...parameters, - keyType: 'RANGE', - }); -} \ No newline at end of file diff --git a/packages/dynamodb-data-mapper-annotations/src/table.spec.ts b/packages/dynamodb-data-mapper-annotations/src/table.spec.ts deleted file mode 100644 index 2388ddc9..00000000 --- a/packages/dynamodb-data-mapper-annotations/src/table.spec.ts +++ /dev/null @@ -1,16 +0,0 @@ -import {table} from "./table"; -import {DynamoDbTable} from '@aws/dynamodb-data-mapper'; - -describe('table', () => { - it( - 'should bind the provided table name to the target in a way compatible with the DynamoDbTable protocol', - () => { - class MyDocument {} - const tableName = 'tableName'; - const decorator = table(tableName); - decorator(MyDocument); - - expect((new MyDocument() as any)[DynamoDbTable]).toBe(tableName); - } - ); -}); diff --git a/packages/dynamodb-data-mapper-annotations/src/table.ts b/packages/dynamodb-data-mapper-annotations/src/table.ts deleted file mode 100644 index b601298a..00000000 --- a/packages/dynamodb-data-mapper-annotations/src/table.ts +++ /dev/null @@ -1,17 +0,0 @@ -import {ClassAnnotation} from './annotationShapes'; -import {DynamoDbTable} from '@aws/dynamodb-data-mapper'; - -/** - * Declare a TypeScript class to represent items in a table in a way that is - * understandable by the AWS DynamoDB DataMapper for JavaScript. Meant to be - * used as a TypeScript class decorator in projects compiled with the - * `experimentalDecorators` option enabled. 
- * - * @see https://www.typescriptlang.org/docs/handbook/decorators.html - * @see https://www.typescriptlang.org/docs/handbook/compiler-options.html - */ -export function table(tableName: string): ClassAnnotation { - return constructor => { - constructor.prototype[DynamoDbTable] = tableName; - }; -} diff --git a/packages/dynamodb-data-mapper-annotations/src/versionAttribute.spec.ts b/packages/dynamodb-data-mapper-annotations/src/versionAttribute.spec.ts deleted file mode 100644 index 73ca848e..00000000 --- a/packages/dynamodb-data-mapper-annotations/src/versionAttribute.spec.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { versionAttribute } from './versionAttribute'; - -jest.mock('./attribute', () => ({attribute: jest.fn()})); -import { attribute } from './attribute'; - -describe('versionAttribute', () => { - beforeEach(() => { - (attribute as any).mockClear(); - }); - - it( - 'should call attribute with a defined type and versionAttribute trait', - () => { - versionAttribute(); - - expect((attribute as any).mock.calls.length).toBe(1); - expect((attribute as any).mock.calls[0]).toEqual([ - { - type: 'Number', - versionAttribute: true, - } - ]); - } - ); - - it('should pass through any supplied parameters', () => { - const attributeName = 'foo' - versionAttribute({attributeName}); - - expect((attribute as any).mock.calls[0][0]) - .toMatchObject({attributeName}); - }); -}); diff --git a/packages/dynamodb-data-mapper-annotations/src/versionAttribute.ts b/packages/dynamodb-data-mapper-annotations/src/versionAttribute.ts deleted file mode 100644 index 2b453997..00000000 --- a/packages/dynamodb-data-mapper-annotations/src/versionAttribute.ts +++ /dev/null @@ -1,13 +0,0 @@ -import {PropertyAnnotation} from './annotationShapes'; -import {attribute} from './attribute'; -import {NumberType} from '@aws/dynamodb-data-marshaller'; - -export function versionAttribute( - parameters: Partial = {} -): PropertyAnnotation { - return attribute({ - ...parameters, - type: 'Number', - versionAttribute: true, - }); -} \ No newline at end of file diff --git a/packages/dynamodb-data-mapper-annotations/tsconfig.json b/packages/dynamodb-data-mapper-annotations/tsconfig.json deleted file mode 100644 index ecc678cc..00000000 --- a/packages/dynamodb-data-mapper-annotations/tsconfig.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "compilerOptions": { - "target": "es5", - "lib": [ - "es5", - "es2015.iterable", - "es2015.promise", - "es2015.collection", - "es2015.symbol.wellknown", - "esnext.asynciterable" - ], - "downlevelIteration": true, - "importHelpers": true, - "module": "commonjs", - "strict": true, - "noUnusedLocals": true, - "declaration": true, - "sourceMap": true, - "experimentalDecorators": true, - "emitDecoratorMetadata": true, - "rootDir": "./src", - "outDir": "./build" - }, - "typedocOptions": { - "mode": "file", - "out": "../../docs/packages/dynamodb-data-mapper-annotations", - "excludeNotExported": true, - "excludePrivate": true, - "hideGenerator": true - } -} diff --git a/packages/dynamodb-data-mapper-annotations/tsconfig.test.json b/packages/dynamodb-data-mapper-annotations/tsconfig.test.json deleted file mode 100644 index 57f7d5b1..00000000 --- a/packages/dynamodb-data-mapper-annotations/tsconfig.test.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "sourceMap": false, - "inlineSourceMap": true, - "inlineSources": true, - "rootDir": "./src", - "outDir": "./build" - } -} diff --git a/packages/dynamodb-data-mapper/.gitignore b/packages/dynamodb-data-mapper/.gitignore 
deleted file mode 100644 index 01c77675..00000000 --- a/packages/dynamodb-data-mapper/.gitignore +++ /dev/null @@ -1 +0,0 @@ -!jest.integration.js diff --git a/packages/dynamodb-data-mapper/.npmignore b/packages/dynamodb-data-mapper/.npmignore deleted file mode 100644 index 1d116ecc..00000000 --- a/packages/dynamodb-data-mapper/.npmignore +++ /dev/null @@ -1,10 +0,0 @@ -/src -/node_modules -/coverage - -*.spec.d.ts -*.spec.js -*.spec.js.map - -tsconfig.json -tsconfig.test.json diff --git a/packages/dynamodb-data-mapper/CHANGELOG.md b/packages/dynamodb-data-mapper/CHANGELOG.md deleted file mode 100644 index 4b4a99fb..00000000 --- a/packages/dynamodb-data-mapper/CHANGELOG.md +++ /dev/null @@ -1,131 +0,0 @@ -# Changelog -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) -and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). - -## [0.7.3] -### Added - - Export query/scan/parallelScan iterator and paginator classes. - -## [0.7.2] -### Fixed - - Sort key configurations in `CreateTableInput` so that hash keys always appear - first. - -## [0.7.1] -### Removed - - Remove package rollup at `./build/index.mjs` due to bundler incompatibilities. - -## [0.7.0] -### Added - - Add a package rollup at `./build/index.mjs` to support tree shaking. - -## [0.6.0] -### Fixed - - Update `query` and `scan` to serialize whatever key properties are provided - without injecting any defaulted values. - - Update `DataMapper` for TypeScript 2.9 compatibility. - -### Added - - Use purpose-built async iterable objects as the return value for `query`, - `scan`, and `parallelScan`. - - Report the `count`, `scannedCount`, and `consumedCapacity` tallied over the - lifetime of a `query`, `scan`, or `parallelScan` as properties on the - returned iterable. - - Provide a method to get the underlying paginator for a `query`, `scan`, or - `parallelScan` iterator. The paginator may be used to suspend and resume - iteration at any page boundary. - - Add `limit` parameter to `scan` and `query` to automatically cease iteration - once a certain number of items have been returned or the results have been - exhausted, whichever comes first. - -## [0.5.0] -### Fixed - - Add default message to `ItemNotFoundException` - - Ensure options provided are used when `query` is called with a named - parameter bag. -### Added - - Add support for executing custom update expressions. - -## [0.4.2] -### Fixed - - Ensure `query` and `scan` marshall exclusive start keys for the specified - index. - -## [0.4.1] -### Fixed - - Ensure `query` returns instances of the provided model class. - -## [0.4.0] -### Added - - Add `createTable` to create tables based on table names and schemas bound to - constructor prototypes - - Add `ensureTableExists` to create a table only if it does not already exist - - Add `deleteTable` to delete tables based on table names bound to constructor - prototypes - - Add `ensureTableNotExists` to delete a table only if it is not already - deleted - -## [0.3.2] -### Fixed - - Only include expression name or value substitions when a substitution has - occurred - -## [0.3.1] -### Fixed - - Ensure retried writes in a `batchDelete`, `batchPut`, or `batchWrite` are - only yielded once - -## [0.3.0] -### Added - - Add `batchGet`, which allows a synchronous or asynchronous iterable of items - (like those supplied to `get`) to be automatically grouped into - `BatchGetItem` operations. 
- - Add `batchDelete`, which allows a synchronous or asynchronous iterable of - items (like those supplied to `delete`) to be automatically grouped into - `BatchWriteItem` operations. - - Add `batchPut`, which allows a synchronous or asynchronous iterable of - items (like those supplied to `put`) to be automatically grouped into - `BatchWriteItem` operations. - - Add `batchWrite`, which allows a synchronous or asynchronous iterable of - tuples of tags (`'put'` or `'delete'`) and items (like those supplied to the - `put` or `delete` methods, respectively) to be automatically grouped into - `BatchWriteItem` operations. - -## [0.2.1] -### Added - - Add the ability to call all DataMapper methods with positional rather than - named parameters - - Add API documentation - -### Deprecated - - Deprecate calling DataMapper methods with a single bag of named parameters - -## [0.2.0] -### Removed - - **BREAKING CHANGE**: Removed the `returnValues` parameter from `put`. `put` - will now always return the value that was persisted, thereby providing - access to injected defaults and accurate version numbers. - -### Added - - Add a `parallelScan` method to the DataMapper. - - Add optional parameters to the `scan` method to allow its use as a parallel - scan worker - - Add a `pageSize` parameter to `query` and `scan` to limit the size of pages - fetched during a read. `pageSize` was previously called `limit`. - -### Changed - - Use TSLib instead of having TypeScript generate helpers to reduce bundle size - -### Deprecated - - Deprecate `limit` parameter on `query` and `scan`. It has been renamed to - `pageSize`, though a value provided for `limit` will still be used if no - `pageSize` parameter is provided. - -## [0.1.1] -### Fixed - - Update dependency version to match released version identifier - -## [0.1.0] -Initial release diff --git a/packages/dynamodb-data-mapper/LICENSE b/packages/dynamodb-data-mapper/LICENSE deleted file mode 100644 index da05f5c9..00000000 --- a/packages/dynamodb-data-mapper/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2017 Amazon.com, Inc. or its affiliates - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/packages/dynamodb-data-mapper/README.md b/packages/dynamodb-data-mapper/README.md deleted file mode 100644 index d40258a4..00000000 --- a/packages/dynamodb-data-mapper/README.md +++ /dev/null @@ -1,705 +0,0 @@ -# Amazon DynamoDB DataMapper - -[![Apache 2 License](https://img.shields.io/github/license/awslabs/dynamodb-data-mapper-js.svg?style=flat)](http://aws.amazon.com/apache-2-0/) - -This library provides a `DataMapper` class that allows easy interoperability -between your application's domain classes and their persisted form in Amazon -DynamoDB. Powered by the `@aws/dynamodb-data-marshaller` and -`@aws/dynamodb-expressions` packages, using `DataMapper` lets you define each -object's persisted representation once and then load, save, scan, and query your -tables using the vocabulary of your application domain rather than its -representation in DynamoDB. 
- -## Getting started - -To use the `DataMapper` with a given JavaScript class, you will need to add a -couple properties to the prototype of the class you would like to map to a -DynamoDB table. Specifically, you will need to provide a schema and the name of -the table: - -```typescript -import {DynamoDbSchema, DynamoDbTable} from '@aws/dynamodb-data-mapper'; - -class MyDomainModel { - // declare methods and properties as normal -} - -Object.defineProperties(MyDomainModel.prototype, { - [DynamoDbTable]: { - value: 'MyTable' - }, - [DynamoDbSchema]: { - value: { - id: { - type: 'String', - keyType: 'HASH' - }, - foo: {type: 'String'}, - bar: { - type: 'Set', - memberType: 'String', - }, - baz: { - type: 'Tuple', - members: [ - {type: 'Boolean'}, - {type: 'String'}, - ], - }, - }, - }, -}); -``` - -The schema and table name may be declared as property accessors directly on the -class if the value should be determined dynamically: - -```typescript -import {DynamoDbTable} from '@aws/dynamodb-data-mapper'; - -class MyOtherDomainClass { - id: number; - - get [DynamoDbTable]() { - return this.id % 2 === 0 ? 'evens' : 'odds'; - } -} -``` - -Next, create an instance of `DataMapper` and use the `MyDomainClass` constructor -defined above to save and load objects from DynamoDB: - -```typescript -import { - DataMapper, - DynamoDbSchema, - DynamoDbTable, -} from '@aws/dynamodb-data-mapper'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -const client = new DynamoDB({region: 'us-west-2'}); -const mapper = new DataMapper({client}); - -class MyDomainModel { - id: string; - foo?: string; - bar?: Set; - baz?: [boolean, string]; -} - -Object.defineProperties(MyDomainModel.prototype, { - [DynamoDbTable]: { - value: 'MyTable' - }, - [DynamoDbSchema]: { - value: { - id: { - type: 'String', - keyType: 'HASH' - }, - foo: {type: 'String'}, - bar: { - type: 'Set', - memberType: 'String', - }, - baz: { - type: 'Tuple', - members: [ - {type: 'Boolean'}, - {type: 'String'}, - ], - }, - }, - }, -}); - -// delete an object -const toDelete = new MyDomainModel(); -toDelete.id = 'DELETE_ME'; -mapper.delete(toDelete); - -// if that's too verbose, you can write the above as a single expression with -// Object.assign: -mapper.delete(Object.assign(new MyDomainModel(), {id: 'DELETE_ME'})); - -// fetch an object -const toGet = new MyDomainModel(); -toGet.id = 'ID_TO_FETCH'; -const fetched = await mapper.get(toGet); - -// this should return a rejected promise, as it's fetching an object that does -// not exist -mapper.get(toDelete) - .catch(err => console.log('I expected this to happen')); - -// put something new into the database -const toPut = new MyDomainModel(); -toPut.id = 'NEW_RECORD'; -toPut.foo = 'bar'; -toPut.bar = new Set(['fizz', 'buzz', 'pop']); -toPut.baz = [true, 'quux']; - -mapper.put(toPut).then((persisted: MyDomainModel) => { - // now change the record a bit - const toUpdate = new MyDomainModel(); - toUpdate.id = persisted.id; - toUpdate.baz = [false, 'beep']; - return mapper.update(toUpdate, {onMissing: 'skip'}); -}); -``` - -## Supported operations - -### `batchDelete` - -Deletes any number of items from one or more tables in batches of 25 or fewer -items. Unprocessed deletes are retried following an exponentially increasing -backoff delay that is applied on a per-table basis. - -Returns an async iterable of items that have been deleted (deleted items are -yielded when the delete has been accepted by DynamoDB). The results can be -consumed with a `for-await-of` loop. 
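For example, a minimal sketch of draining the iterable returned by `batchDelete`, assuming the `MyDomainModel` class and `mapper` instance shown in the Getting Started section above (the ids here are placeholders):

```typescript
const toDelete = ['id-1', 'id-2', 'id-3'].map(
    id => Object.assign(new MyDomainModel(), {id})
);

// each item is yielded only after its delete has been accepted by DynamoDB
for await (const deleted of mapper.batchDelete(toDelete)) {
    console.log('deleted', deleted.id);
}
```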
If you are using TypeScript, you will need -to include `esnext.asynciterable` in your `lib` declaration (as well as enabling -`downlevelIteration` if targeting ES5 or lower). Please refer to [the TypeScript -release notes](https://www.typescriptlang.org/docs/handbook/release-notes/typescript-2-3.html#async-iteration) -for more information. - -Takes one parameter: - -* An iterable (synchronous or asynchronous) of items to delete. Each item must - be an instance of a class with a table name accessible via a property - identified with the `DynamoDbTable` symbol and a schema accessible via a - property identified with the `DynamoDbSchema` symbol. - -### `batchGet` - -Fetches any number of items from one or more tables in batches of 100 or fewer -items. Unprocessed reads are retried following an exponentially increasing -backoff delay that is applied on a per-table basis. - -Takes two parameters: - -* An iterable (synchronous or asynchronous) of items to fetch. Each item must be - an instance of a class with a table name accessible via a property - identified with the `DynamoDbTable` symbol and a schema accessible via a - property identified with the `DynamoDbSchema` symbol. - -* (Optional) An object specifying any of the following options: - - * `readConsistency` - Specify `'strong'` to perform a strongly consistent - read. Specify `'eventual'` (the default) to perform an eventually - consistent read. - - * `perTableOptions` - An object whose keys are table names and whose values - are objects specifying any of the following options: - - * `readConsistency` - Specify `'strong'` to perform a strongly - consistent read. Specify `'eventual'` (the default) to perform an - eventually consistent read. - - * `projection` - A projection expression directing DynamoDB to return a - subset of the fetched item's attributes. Please refer to the - documentation for the `@aws/dynamodb-expressions` package for - guidance on creating projection expression objects. - - * `projectionSchema` - The schema to use when mapping the supplied - `projection` option to the attribute names used in DynamoDB. - - This parameter is only necessary if a batch contains items from - multiple classes that map to the *same* table using *different* - property names to represent the same DynamoDB attributes. - - If not supplied, the schema associated with the first item - associated with a given table will be used in its place. - -### `batchPut` - -Puts any number of items to one or more tables in batches of 25 or fewer items. -Unprocessed puts are retried following an exponentially increasing backoff delay -that is applied on a per-table basis. - -Returns an async iterable of items that have been put (put items are yielded -when the put has been accepted by DynamoDB). The results can be consumed with a -`for-await-of` loop. If you are using TypeScript, you will need to include -`esnext.asynciterable` in your `lib` declaration (as well as enabling -`downlevelIteration` if targeting ES5 or lower). Please refer to [the TypeScript -release notes](https://www.typescriptlang.org/docs/handbook/release-notes/typescript-2-3.html#async-iteration) -for more information. - -Takes one parameter: - -* An iterable (synchronous or asynchronous) of items to put. Each item must be - an instance of a class with a table name accessible via a property - identified with the `DynamoDbTable` symbol and a schema accessible via a - property identified with the `DynamoDbSchema` symbol. 
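A minimal sketch of consuming `batchPut` follows the same pattern, again assuming the `MyDomainModel` class and `mapper` instance from the Getting Started section:

```typescript
const records = ['a', 'b', 'c'].map(
    id => Object.assign(new MyDomainModel(), {id, foo: 'fizz'})
);

// writes are grouped into batches of up to 25 items; each item is yielded
// once its put has been accepted by DynamoDB
for await (const persisted of mapper.batchPut(records)) {
    console.log('saved', persisted.id);
}
```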
- -### `batchWrite` - -Puts or deletes any number of items to one or more tables in batches of 25 or -fewer items. Unprocessed writes are retried following an exponentially -increasing backoff delay that is applied on a per-table basis. - -Returns an async iterable of tuples of the string 'put'|'delete' and the item on -which the specified write action was performed. The results can be consumed with -a `for-await-of` loop. If you are using TypeScript, you will need to include -`esnext.asynciterable` in your `lib` declaration (as well as enabling -`downlevelIteration` if targeting ES5 or lower). Please refer to [the TypeScript -release notes](https://www.typescriptlang.org/docs/handbook/release-notes/typescript-2-3.html#async-iteration) -for more information. - -Takes one parameter: - -* An iterable (synchronous or asynchronous) of tuples of the string - 'put'|'delete' and the item on which to perform the specified write action. - Each item must be an instance of a class with a table name accessible via a - property identified with the `DynamoDbTable` symbol and a schema accessible - via a property identified with the `DynamoDbSchema` symbol. - -### `delete` - -Removes an item from a DynamoDB table. Takes two parameters: - -* The item to be deleted. Must be an instance of a class with a table name - accessible via a property identified with the `DynamoDbTable` symbol and a - schema accessible via a property identified with the `DynamoDbSchema` - symbol. - -* (Optional) An object specifying any of the following options: - - * `condition` - A condition expression whose assertion must be satisfied in - order for the delete operation to be executed. Please refer to the - documentation for the `@aws/dynamodb-expressions` package for guidance - on creating condition expression objects. - - * `returnValues` - Specify `'ALL_OLD'` to have the deleted item returned to - you when the delete operation completes. - - * `skipVersionCheck` - Whether to forgo creating a condition expression - based on a defined `versionAttribute` in the schema. - -### `get` - -Fetches an item from a DynamoDB table. If no item with the specified key was -found, the returned promise will be rejected with an error. Takes two -parameters: - -* The item to be fetched. Must be an instance of a class with a table name - accessible via a property identified with the `DynamoDbTable` symbol and a - schema accessible via a property identified with the `DynamoDbSchema` - symbol. - - The supplied item will **NOT** be updated in place. Rather, a new item of - the same class with data from the DynamoDB table will be returned. - -* (Optional) An object specifying any of the following options: - - * `readConsistency` - Specify `'strong'` to perform a strongly consistent - read. Specify `'eventual'` (the default) to perform an eventually - consistent read. - - * `projection` - A projection expression directing DynamoDB to return a - subset of the fetched item's attributes. Please refer to the - documentation for the `@aws/dynamodb-expressions` package for guidance - on creating projection expression objects. - -### `put` - -Inserts an item into a DynamoDB table. Takes two parameters: - -* The item to be inserted. Must be an instance of a class with a table name - accessible via a property identified with the `DynamoDbTable` symbol and a - schema accessible via a property identified with the `DynamoDbSchema` - symbol. 
- -* (Optional) An object specifying any of the following options: - - * `condition` - A condition expression whose assertion must be satisfied in - order for the put operation to be executed. Please refer to the - documentation for the `@aws/dynamodb-expressions` package for guidance - on creating condition expression objects. - - * `returnValues` - Specify `'ALL_OLD'` to have the overwritten item (if one - existed) returned to you when the put operation completes. - - * `skipVersionCheck` - Whether to forgo creating a condition expression - based on a defined `versionAttribute` in the schema. - -### `query` - -Retrieves multiple values from a table or index based on the primary key -attributes. Queries must target a single partition key value but may read -multiple items with different range keys. - -This method is implemented as an async iterator and the results can be consumed -with a `for-await-of` loop. If you are using TypeScript, you will need to -include `esnext.asynciterable` in your `lib` declaration (as well as enabling -`downlevelIteration` if targeting ES5 or lower). Please refer to [the TypeScript -release notes](https://www.typescriptlang.org/docs/handbook/release-notes/typescript-2-3.html#async-iteration) -for more information. - -Takes three parameters: - -* The constructor function to use for any results returned by this operation. - Must have a prototype with a table name accessible via a property identified - with the `DynamoDbTable` symbol and a schema accessible via a property - identified with the `DynamoDbSchema` symbol. - -* The condition that specifies the key value(s) for items to be retrieved by the - query operation. You may provide a hash matching key properties to the - values they must equal, a hash matching keys to - `ConditionExpressionPredicate`s, or a fully composed `ConditionExpression`. - If a hash is provided, it may contain a mixture of condition expression - predicates and exact value matches: - - ```typescript - import {between} from '@aws/dynamodb-expressions'; - - const keyCondition = { - partitionKey: 'foo', - rangeKey: between(10, 99), - }; - ``` - - The key condition must target a single value for the partition key. - - Please refer to the documentation for the `@aws/dynamodb-expressions` - package for guidance on creating condition expression objects. - -* (Optional) An object specifying any of the following options: - - * `filter` - A condition expression that DynamoDB applies after the Query - operation, but before the data is returned to you. Items that do not - satisfy the `filter` criteria are not returned. - - You cannot define a filter expression based on a partition key or a sort - key. - - Please refer to the documentation for the `@aws/dynamodb-expressions` - package for guidance on creating condition expression objects. - - * `indexName` - The name of the index against which to execute this query. - If not specified, the query will be executed against the base table. - - * `limit` - The maximum number of items to return. - - * `pageSize` - The maximum number of items to return **per page of results**. - - * `projection` - A projection expression directing DynamoDB to return a - subset of any fetched item's attributes. Please refer to the - documentation for the `@aws/dynamodb-expressions` package for guidance - on creating projection expression objects. - - * `readConsistency` - Specify `'strong'` to perform a strongly consistent - read. Specify `'eventual'` (the default) to perform an eventually - consistent read. 
- - * `scanIndexForward` - Specifies the order for index traversal: If true, the - traversal is performed in ascending order; if false, the traversal is - performed in descending order. - - * `startKey` - The primary key of the first item that this operation will - evaluate. - -#### Query metadata - -The iterator returned by `query` will keep track of the number of items yielded -and the number of items scanned via its `count` and `scannedCount` properties: - -```typescript -const iterator = mapper.query( - MyClass, - {partitionKey: 'foo', rangeKey: between(0, 10)} -); -for await (const record of iterator) { - console.log(record, iterator.count, iterator.scannedCount); -} -``` - -#### Pagination - -If you wish to perform a resumable query, you can use the `.pages()` method of -the iterator returned by `query` to access the underlying paginator. The -paginator differs from the iterator in that it yields arrays of unmarshalled -records and has a `lastEvaluatedKey` property that may be provided to a new -call to `mapper.query` to resume the query later or in a separate process: - -```typescript -const paginator = mapper.query( - MyClass, - {partitionKey: 'foo', rangeKey: between(0, 10)}, - { - // automatically stop after 25 items or the entire result set has been - // fetched, whichever is smaller - limit: 25 - } -).pages(); - -for await (const page of paginator) { - console.log( - paginator.count, - paginator.scannedCount, - paginator.lastEvaluatedKey - ); -} - -const newPaginator = mapper.query( - MyClass, - {partitionKey: 'foo', rangeKey: between(0, 10)}, - { - // start this new paginator where the previous one stopped - startKey: paginator.lastEvaluatedKey - } -).pages(); -``` - -### `scan` - -Retrieves all values in a table or index. - -This method is implemented as an async iterator and the results can be consumed -with a `for-await-of` loop. If you are using TypeScript, you will need to -include `esnext.asynciterable` in your `lib` declaration (as well as enabling -`downlevelIteration` if targeting ES5 or lower). Please refer to [the TypeScript -release notes](https://www.typescriptlang.org/docs/handbook/release-notes/typescript-2-3.html#async-iteration) -for more information. - -Takes two parameters: - -* The constructor function to use for any results returned by this operation. - Must have a prototype with a table name accessible via a property identified - with the `DynamoDbTable` symbol and a schema accessible via a property - identified with the `DynamoDbSchema` symbol. - -* (Optional) An object specifying any of the following options: - - * `filter` - A condition expression that DynamoDB applies after the scan - operation, but before the data is returned to you. Items that do not - satisfy the `filter` criteria are not returned. - - You cannot define a filter expression based on a partition key or a sort - key. - - Please refer to the documentation for the `@aws/dynamodb-expressions` - package for guidance on creating condition expression objects. - - * `indexName` - The name of the index against which to execute this query. - If not specified, the query will be executed against the base table. - - * `limit` - The maximum number of items to return. - - * `pageSize` - The maximum number of items to return **per page of results**. - - * `projection` - A projection expression directing DynamoDB to return a - subset of any fetched item's attributes. 
Please refer to the - documentation for the `@aws/dynamodb-expressions` package for guidance - on creating projection expression objects. - - * `readConsistency` - Specify `'strong'` to perform a strongly consistent - read. Specify `'eventual'` (the default) to perform an eventually - consistent read. - - * `segment` - The identifier for this segment (if this scan is being - performed as part of a parallel scan operation). - - * `startKey` - The primary key of the first item that this operation will - evaluate. - - * `totalSegments` - The number of segments into which this scan has been - divided (if this scan is being performed as part of a parallel scan - operation). - -#### Scan metadata - -The iterator returned by `scan` will keep track of the number of items yielded -and the number of items scanned via its `count` and `scannedCount` properties: - -```typescript -const iterator = mapper.scan(MyClass); -for await (const record of iterator) { - console.log(record, iterator.count, iterator.scannedCount); -} -``` - -#### Pagination - -If you wish to perform a resumable scan, you can use the `.pages()` method of -the iterator returned by `scan` to access the underlying paginator. The -paginator differs from the iterator in that it yields arrays of unmarshalled -records and has a `lastEvaluatedKey` property that may be provided to a new -call to `mapper.scan` to resume the scan later or in a separate process: - -```typescript -const paginator = mapper.scan( - MyClass, - { - // automatically stop after 25 items or the entire result set has been - // fetched, whichever is smaller - limit: 25 - } -).pages(); -for await (const page of paginator) { - console.log( - paginator.count, - paginator.scannedCount, - paginator.lastEvaluatedKey - ); -} - -const newPaginator = mapper.scan( - MyClass, - { - // start this new paginator where the previous one stopped - startKey: paginator.lastEvaluatedKey - } -).pages(); -``` - -### `parallelScan` - -Retrieves all values in a table by dividing the table into segments, all of -which are scanned in parallel. - -This method is implemented as an async iterator and the results can be consumed -with a `for-await-of` loop. If you are using TypeScript, you will need to -include `esnext.asynciterable` in your `lib` declaration (as well as enabling -`downlevelIteration` if targeting ES5 or lower). Please refer to [the TypeScript -release notes](https://www.typescriptlang.org/docs/handbook/release-notes/typescript-2-3.html#async-iteration) -for more information. - -Takes three parameters: - -* The constructor to use for any results returned by this operation. Must have a - prototype with a table name accessible via a property identified with the - `DynamoDbTable` symbol and a schema accessible via a property identified - with the `DynamoDbSchema` symbol. - -* The total number of parallel workers to use to scan the table. - -* (Optional) An object specifying any of the following options: - - * `filter` - A condition expression that DynamoDB applies after the scan - operation, but before the data is returned to you. Items that do not - satisfy the `filter` criteria are not returned. - - You cannot define a filter expression based on a partition key or a sort - key. - - Please refer to the documentation for the `@aws/dynamodb-expressions` - package for guidance on creating condition expression objects. - - * `indexName` - The name of the index against which to execute this query. - If not specified, the query will be executed against the base table. 
- - * `pageSize` - The maximum number of items to return **per page of results**. - - * `projection` - A projection expression directing DynamoDB to return a - subset of any fetched item's attributes. Please refer to the - documentation for the `@aws/dynamodb-expressions` package for guidance - on creating projection expression objects. - - * `readConsistency` - Specify `'strong'` to perform a strongly consistent - read. Specify `'eventual'` (the default) to perform an eventually - consistent read. - - * `startKey` - The primary key of the first item that this operation will - evaluate. - -#### Scan metadata - -The iterator returned by `parallelScan` will keep track of the number of items -yielded and the number of items scanned via its `count` and `scannedCount` -properties: - -```typescript -const iterator = mapper.parallelScan(MyClass, 4); -for await (const record of iterator) { - console.log(record, iterator.count, iterator.scannedCount); -} -``` - -#### Pagination - -If you wish to perform a resumable parallel scan, you can use the `.pages()` -method of the iterator returned by `parallelScan` to access the underlying -paginator. The paginator differs from the iterator in that it yields arrays of -unmarshalled records and has a `scanState` property that may be provided -to a new call to `mapper.parallelScan` to resume the scan later or in a separate -process: - -```typescript -const paginator = mapper.parallelScan( - MyClass, - 4 -).pages(); -for await (const page of paginator) { - console.log( - paginator.count, - paginator.scannedCount, - paginator.lastEvaluatedKey - ); - - break; -} - -const newPaginator = mapper.parallelScan( - MyClass, - 4, - { - // start this new paginator where the previous one stopped - scanState: paginator.scanState - } -).pages(); -``` - -### `update` - -Updates an item in a DynamoDB table. Will leave attributes not defined in the -schema in place. - -Takes two parameters: - -* The item with its desired property state. Must be an instance of a class with - a table name accessible via a property identified with the `DynamoDbTable` - symbol and a schema accessible via a property identified with the - `DynamoDbSchema` symbol. - -* (Optional) An object specifying any of the following options: - - * `condition` - A condition expression whose assertion must be satisfied in - order for the update operation to be executed. Please refer to the - documentation for the `@aws/dynamodb-expressions` package for guidance - on creating condition expression objects. - - * `onMissing` - Specify `'remove'` (the default) to treat the absence of a - value in the supplied `item` as a directive to remove the property from - the record in DynamoDB. Specify `'skip'` to only update the properties - that are defined in the supplied `item`. - - * `skipVersionCheck` - Whether to forgo creating a condition expression - based on a defined `versionAttribute` in the schema. - -### `executeUpdateExpression` - -Executes a custom update expression. This method will **not** automatically -apply a version check, as the current state of the object being updated is not -known. - -Takes four parameters: - -* The expression to execute. Please refer to the documentation for the - `@aws/dynamodb-expressions` package for guidance on creating update - expression objects. - -* The key of the item being updated. - -* The constructor for the class mapped to the table against which the expression - should be run. 
Must have a prototype with a table name accessible via a - property identified with the `DynamoDbTable` symbol and a schema accessible - via a property identified with the `DynamoDbSchema` symbol. - -* (Optional) An object specifying any of the following options: - - * `condition` - A condition expression whose assertion must be satisfied in - order for the update operation to be executed. Please refer to the - documentation for the `@aws/dynamodb-expressions` package for guidance - on creating condition expression objects. diff --git a/packages/dynamodb-data-mapper/jest.integration.js b/packages/dynamodb-data-mapper/jest.integration.js deleted file mode 100644 index 76e1c4df..00000000 --- a/packages/dynamodb-data-mapper/jest.integration.js +++ /dev/null @@ -1,6 +0,0 @@ -module.exports = { - collectCoverage: true, - testMatch: [ - '**/?(*.)(integ).js' - ] -}; diff --git a/packages/dynamodb-data-mapper/package.json b/packages/dynamodb-data-mapper/package.json deleted file mode 100644 index bd940e57..00000000 --- a/packages/dynamodb-data-mapper/package.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "name": "@aws/dynamodb-data-mapper", - "version": "0.7.3", - "description": "A schema-based data mapper for Amazon DynamoDB", - "keywords": [ - "aws", - "dynamodb" - ], - "repository": { - "type": "git", - "url": "git+https://github.com/awslabs/dynamodb-data-mapper-js.git" - }, - "bugs": { - "url": "https://github.com/awslabs/dynamodb-data-mapper-js/issues" - }, - "homepage": "https://awslabs.github.io/dynamodb-data-mapper-js/packages/dynamodb-data-mapper/", - "main": "./build/index.js", - "types": "./build/index.d.ts", - "scripts": { - "build": "tsc", - "docs": "typedoc src", - "integ": "npm run pretest && jest --config=jest.integration.js", - "prepublishOnly": "npm run build", - "pretest": "tsc -p tsconfig.test.json", - "test": "jest \"build/(.+).spec.js\"" - }, - "author": { - "name": "AWS SDK for JavaScript Team", - "email": "aws-sdk-js@amazon.com" - }, - "license": "Apache-2.0", - "devDependencies": { - "@types/jest": "^24", - "@types/node": "^8.0.4", - "aws-sdk": "^2.7.0", - "jest": "^24", - "typedoc": "^0.14.0", - "typescript": "^3.4" - }, - "dependencies": { - "@aws/dynamodb-auto-marshaller": "^0.7.1", - "@aws/dynamodb-batch-iterator": "^0.7.1", - "@aws/dynamodb-data-marshaller": "^0.7.3", - "@aws/dynamodb-expressions": "^0.7.3", - "@aws/dynamodb-query-iterator": "^0.7.1", - "tslib": "^1.9" - }, - "peerDependencies": { - "aws-sdk": "^2.7.0" - } -} diff --git a/packages/dynamodb-data-mapper/src/BatchState.ts b/packages/dynamodb-data-mapper/src/BatchState.ts deleted file mode 100644 index 7ced38e7..00000000 --- a/packages/dynamodb-data-mapper/src/BatchState.ts +++ /dev/null @@ -1,13 +0,0 @@ -import {Schema, ZeroArgumentsConstructor} from '@aws/dynamodb-data-marshaller'; - -export interface BatchState { - [tableName: string]: { - keyProperties: Array; - itemSchemata: { - [identifier: string]: { - schema: Schema; - constructor: ZeroArgumentsConstructor; - }; - }; - }; -} diff --git a/packages/dynamodb-data-mapper/src/DataMapper.integ.ts b/packages/dynamodb-data-mapper/src/DataMapper.integ.ts deleted file mode 100644 index 16f38d3d..00000000 --- a/packages/dynamodb-data-mapper/src/DataMapper.integ.ts +++ /dev/null @@ -1,226 +0,0 @@ -import {DataMapper} from './DataMapper'; -import {ItemNotFoundException} from './ItemNotFoundException'; -import {DynamoDbSchema, DynamoDbTable} from './protocols'; -import {hostname} from 'os'; -import {hrtime} from 'process'; -import DynamoDB 
= require('aws-sdk/clients/dynamodb'); -import {DocumentType} from "@aws/dynamodb-data-marshaller"; -import {Schema} from "@aws/dynamodb-data-marshaller"; -import {equals} from "@aws/dynamodb-expressions"; - -const nestedDocumentDef: DocumentType = { - type: 'Document', - members: { - foo: {type: 'String'} - } -}; -nestedDocumentDef.members.recursive = nestedDocumentDef; - -interface NestedDocument { - foo?: string; - recursive?: NestedDocument; -} - -const [seconds, nanoseconds] = hrtime(); -const TableName = `mapper-integ-${seconds}-${nanoseconds}-${hostname()}`; -const schema: Schema = { - key: { - type: 'Number', - attributeName: 'testIndex', - keyType: 'HASH', - }, - timestamp: {type: 'Date'}, - data: nestedDocumentDef, - tuple: { - type: 'Tuple', - members: [ - {type: 'Boolean'}, - {type: 'String'}, - ] - }, - scanIdentifier: {type: 'Number'} -}; - -class TestRecord { - key!: number; - timestamp?: Date; - data?: NestedDocument; - tuple?: [boolean, string]; - scanIdentifier?: number; -} - -Object.defineProperties(TestRecord.prototype, { - [DynamoDbSchema]: {value: schema}, - [DynamoDbTable]: {value: TableName}, -}); - -describe('DataMapper', () => { - let idx = 0; - const ddbClient = new DynamoDB(); - const mapper = new DataMapper({client: ddbClient}); - jest.setTimeout(60000); - - beforeAll(() => { - return mapper.ensureTableExists(TestRecord, { - readCapacityUnits: 10, - writeCapacityUnits: 10, - }); - }); - - afterAll(() => { - return mapper.ensureTableNotExists(TestRecord); - }); - - it('should save and load objects', async () => { - const key = idx++; - const mapper = new DataMapper({client: ddbClient}); - const timestamp = new Date(); - // subsecond precision will not survive the trip through the serializer, - // as DynamoDB's ttl fields use unix epoch (second precision) timestamps - timestamp.setMilliseconds(0); - const item = new TestRecord(); - item.key = key; - item.timestamp = timestamp; - item.data = { - recursive: { - recursive: { - recursive: { - foo: '', - }, - }, - }, - }; - - expect(await mapper.put(item)).toEqual(item); - - expect(await mapper.get(item, {readConsistency: 'strong'})) - .toEqual(item); - }); - - it('should delete objects', async () => { - const key = idx++; - const mapper = new DataMapper({client: ddbClient}); - const timestamp = new Date(); - // subsecond precision will not survive the trip through the serializer, - // as DynamoDB's ttl fields use unix epoch (second precision) timestamps - timestamp.setMilliseconds(0); - const item = new TestRecord(); - item.key = key; - item.timestamp = timestamp; - item.data = { - recursive: { - recursive: { - recursive: { - foo: '', - }, - }, - }, - }; - - await mapper.put(item); - - await expect(mapper.get(item, {readConsistency: 'strong'})).resolves; - - await mapper.delete(item); - - await expect(mapper.get(item, {readConsistency: 'strong'})) - .rejects - .toMatchObject(new ItemNotFoundException({ - TableName, - ConsistentRead: true, - Key: {testIndex: {N: key.toString(10)}} - })); - }); - - it('should scan objects', async () => { - const keys: Array = []; - const mapper = new DataMapper({client: ddbClient}); - const scanIdentifier = Date.now(); - - const items: Array = []; - for (let i = 0; i < 30; i++) { - const item = new TestRecord(); - item.key = idx++; - item.tuple = [item.key % 2 === 0, 'string']; - item.scanIdentifier = scanIdentifier; - keys.push(item.key); - items.push(item); - } - - for await (const _ of mapper.batchPut(items)) {} - - const results: Array = []; - for await (const element of 
mapper.scan(TestRecord, { - readConsistency: 'strong', - filter: { - ...equals(scanIdentifier), - subject: 'scanIdentifier' - }, - })) { - results.push(element); - } - - expect(results.sort((a, b) => a.key - b.key)).toEqual(keys.map(key => { - const record = new TestRecord(); - record.key = key; - record.scanIdentifier = scanIdentifier; - record.tuple = [key % 2 === 0, 'string']; - return record; - })); - }); - - it('should scan objects in parallel', async () => { - const keys: Array = []; - const mapper = new DataMapper({client: ddbClient}); - const scanIdentifier = Date.now(); - - const items: Array = []; - for (let i = 0; i < 10; i++) { - const item = new TestRecord(); - item.key = idx++; - item.tuple = [item.key % 2 === 0, 'string']; - item.scanIdentifier = scanIdentifier; - keys.push(item.key); - items.push(item); - } - - for await (const _ of mapper.batchPut(items)) {} - - const results: Array = []; - for await (const element of mapper.parallelScan(TestRecord, 4, { - readConsistency: 'strong', - filter: { - ...equals(scanIdentifier), - subject: 'scanIdentifier' - }, - })) { - results.push(element); - } - - expect(results.sort((a, b) => a.key - b.key)).toEqual(keys.map(key => { - const record = new TestRecord(); - record.key = key; - record.scanIdentifier = scanIdentifier; - record.tuple = [key % 2 === 0, 'string']; - return record; - })); - }); - - it('should query objects', async () => { - const mapper = new DataMapper({client: ddbClient}); - - const item = new TestRecord(); - item.key = idx++; - item.tuple = [item.key % 2 === 0, 'string']; - - await mapper.put({item}); - - for await (const element of mapper.query( - TestRecord, - {key: item.key}, - {readConsistency: 'strong'} - )) { - expect(element).toEqual(item); - } - }); -}); diff --git a/packages/dynamodb-data-mapper/src/DataMapper.spec.ts b/packages/dynamodb-data-mapper/src/DataMapper.spec.ts deleted file mode 100644 index d525e0e3..00000000 --- a/packages/dynamodb-data-mapper/src/DataMapper.spec.ts +++ /dev/null @@ -1,4529 +0,0 @@ -import {DataMapper} from "./DataMapper"; -import { - DynamoDbSchema, - DynamoDbTable, -} from "./protocols"; -import {Schema} from "@aws/dynamodb-data-marshaller"; -import { - AttributePath, - between, - equals, - FunctionExpression, - inList, - UpdateExpression, -} from "@aws/dynamodb-expressions"; -import {ItemNotFoundException} from "./ItemNotFoundException"; -import {BatchGetOptions, ParallelScanState, GlobalSecondaryIndexOptions} from './index'; -import { - BatchGetItemInput, - BatchWriteItemInput, - DescribeTableOutput, - GetItemOutput, - PutItemOutput -} from "aws-sdk/clients/dynamodb"; - -type BinaryValue = ArrayBuffer|ArrayBufferView; - -describe('DataMapper', () => { - it('should set the customUserAgent config property on the client', () => { - const client: any = {config: {}}; - new DataMapper({client}); - - expect(client.config.customUserAgent) - .toMatch('dynamodb-data-mapper-js/'); - }); - - describe('#batchDelete', () => { - const promiseFunc = jest.fn(() => Promise.resolve({ - UnprocessedItems: {} - })); - const mockDynamoDbClient = { - config: {}, - batchWriteItem: jest.fn(() => ({promise: promiseFunc})), - }; - - beforeEach(() => { - promiseFunc.mockClear(); - mockDynamoDbClient.batchWriteItem.mockClear(); - }); - - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - }); - - class Item { - constructor(public fizz?: number) {} - - get [DynamoDbTable](): string { - return 'foo'; - } - - get [DynamoDbSchema](): Schema { - return { - fizz: { - type: 'Number', - 
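
The scan and parallel-scan cases in the deleted DataMapper.integ.ts filter a strongly consistent scan by spreading a condition built with `equals` from `@aws/dynamodb-expressions` and naming the subject attribute. A usage sketch under the same assumptions (hypothetical `Task` class and table name):

```typescript
import {DataMapper, DynamoDbSchema, DynamoDbTable} from '@aws/dynamodb-data-mapper';
import {equals} from '@aws/dynamodb-expressions';
import DynamoDB = require('aws-sdk/clients/dynamodb');

class Task {
    id!: number;
    batchId?: number;

    get [DynamoDbTable]() { return 'tasks'; }       // hypothetical table name
    get [DynamoDbSchema]() {
        return {
            id: {type: 'Number', keyType: 'HASH'},
            batchId: {type: 'Number'},
        };
    }
}

const mapper = new DataMapper({client: new DynamoDB()});

async function tasksInBatch(batchId: number): Promise<Task[]> {
    const matches: Task[] = [];
    // The filter is a condition object; spreading `equals()` and adding a
    // `subject` mirrors the deleted integration test.
    for await (const task of mapper.scan(Task, {
        readConsistency: 'strong',
        filter: {...equals(batchId), subject: 'batchId'},
    })) {
        matches.push(task);
    }
    return matches;
}
```
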
keyType: 'HASH' - } - }; - } - } - - for (const asyncInput of [true, false]) { - it( - 'should should partition delete batches into requests with 25 or fewer items', - async () => { - const deletes: Array = []; - const expected: any = [ - [{RequestItems: {foo: []}}], - [{RequestItems: {foo: []}}], - [{RequestItems: {foo: []}}], - [{RequestItems: {foo: []}}], - ]; - for (let i = 0; i < 80; i++) { - deletes.push(new Item(i)); - expected[Math.floor(i / 25)][0].RequestItems.foo.push({ - DeleteRequest: { - Key: { - fizz: {N: String(i)} - } - } - }); - } - - const input = asyncInput - ? async function *() { - for (const item of deletes) { - await new Promise(resolve => setTimeout( - resolve, - Math.round(Math.random()) - )); - yield item; - } - }() - : deletes; - - for await (const deleted of mapper.batchDelete(input)) { - expect(deleted).toBeInstanceOf(Item); - } - - const {calls} = mockDynamoDbClient.batchWriteItem.mock; - expect(calls.length).toBe(4); - expect(calls).toEqual(expected); - } - ); - - it('should should retry unprocessed items', async () => { - const deletes: Array = []; - for (let i = 0; i < 80; i++) { - deletes.push(new Item(i)); - } - - const failures = new Set(['24', '42', '60']); - for (const failureId of failures) { - const item = { - DeleteRequest: { - Key: {fizz: {N: failureId}} - } - }; - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - UnprocessedItems: {foo: [item]} - })); - } - - const input = asyncInput - ? async function *() { - for (const item of deletes) { - await new Promise(resolve => setTimeout( - resolve, - Math.round(Math.random()) - )); - yield item; - } - }() - : deletes; - - for await (const deleted of mapper.batchDelete(input)) { - expect(deleted).toBeInstanceOf(Item); - } - - const {calls} = mockDynamoDbClient.batchWriteItem.mock; - expect(calls.length).toBe(4); - const callCount: {[key: string]: number} = (calls as Array>).reduce( - ( - keyUseCount: {[key: string]: number}, - [{RequestItems: {foo}}] - ) => { - for (const {DeleteRequest: {Key: {fizz: {N: key}}}} of (foo as any)) { - if (key in keyUseCount) { - keyUseCount[key]++; - } else { - keyUseCount[key] = 1; - } - } - - return keyUseCount; - }, - {} - ); - - for (let i = 0; i < 80; i++) { - expect(callCount[i]).toBe(failures.has(String(i)) ? 
2 : 1); - } - }); - } - }); - - describe('#batchGet', () => { - const promiseFunc = jest.fn(() => Promise.resolve({ - UnprocessedItems: {} - })); - const mockDynamoDbClient = { - config: {}, - batchGetItem: jest.fn(() => ({promise: promiseFunc})), - }; - - beforeEach(() => { - promiseFunc.mockClear(); - mockDynamoDbClient.batchGetItem.mockClear(); - }); - - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - }); - - class Item { - public buzz?: boolean; - public pop?: string; - - constructor(public fizz: number) {} - - get [DynamoDbTable](): string { - return 'foo'; - } - - get [DynamoDbSchema](): Schema { - return { - fizz: { - type: 'Number', - keyType: 'HASH' - }, - buzz: {type: 'Boolean'}, - pop: {type: 'String'} - }; - } - } - - it('should allow setting an overall read consistency', async () => { - const gets = [new Item(0)]; - for await (const _ of mapper.batchGet(gets, {readConsistency: 'strong'})) { - // pass - } - - expect(mockDynamoDbClient.batchGetItem.mock.calls).toEqual([ - [ - { - RequestItems: { - foo: { - Keys: [ - {fizz: {N: '0'}} - ], - ConsistentRead: true - } - } - } - ] - ]) - }); - - it('should allow setting per-table read consistency', async () => { - const gets =[ - new Item(0), - { - quux: 1, - [DynamoDbTable]: 'bar', - [DynamoDbSchema]: { - quux: { - type: 'Number', - keyType: 'HASH', - } - } - }, - ]; - const config: BatchGetOptions = { - readConsistency: 'eventual', - perTableOptions: { - bar: { - readConsistency: 'strong' - } - } - } - - for await (const _ of mapper.batchGet(gets, config)) { - // pass - } - - expect(mockDynamoDbClient.batchGetItem.mock.calls).toEqual([ - [ - { - RequestItems: { - foo: { - Keys: [ - {fizz: {N: '0'}} - ], - }, - bar: { - Keys: [ - {quux: {N: '1'}} - ], - ConsistentRead: true - } - } - } - ] - ]); - }); - - it('should allow specifying per-table projection expressions', async () => { - const gets =[ - new Item(0), - { - quux: 1, - [DynamoDbTable]: 'bar', - [DynamoDbSchema]: { - quux: { - type: 'Number', - keyType: 'HASH' - }, - snap: { - type: 'Document', - attributeName: 'crackle', - members: { - pop: { - type: 'String', - attributeName: 'squark', - } - } - }, - mixedList: { - type: 'Collection', - attributeName: 'myList' - } - } - }, - ]; - const config: BatchGetOptions = { - perTableOptions: { - bar: { - projection: ['snap.pop', 'mixedList[2]'] - } - } - }; - - for await (const _ of mapper.batchGet(gets, config)) { - // pass - } - - expect(mockDynamoDbClient.batchGetItem.mock.calls).toEqual([ - [ - { - RequestItems: { - foo: { - Keys: [ - {fizz: {N: '0'}} - ] - }, - bar: { - Keys: [ - {quux: {N: '1'}} - ], - ProjectionExpression: '#attr0.#attr1, #attr2[2]', - ExpressionAttributeNames: { - '#attr0': 'crackle', - '#attr1': 'squark', - '#attr2': 'myList', - } - } - } - } - ] - ]); - }); - - for (const asyncInput of [true, false]) { - it( - 'should should partition get batches into requests with 100 or fewer items', - async () => { - const gets: Array = []; - const expected: any = [ - [ { RequestItems: { foo: { Keys: [] } } } ], - [ { RequestItems: { foo: { Keys: [] } } } ], - [ { RequestItems: { foo: { Keys: [] } } } ], - [ { RequestItems: { foo: { Keys: [] } } } ], - ]; - const responses: any = [ - {Responses: {foo: []}}, - {Responses: {foo: []}}, - {Responses: {foo: []}}, - {Responses: {foo: []}}, - ]; - - for (let i = 0; i < 325; i++) { - gets.push(new Item(i)); - responses[Math.floor(i / 100)].Responses.foo.push({ - fizz: {N: String(i)}, - buzz: {BOOL: i % 2 === 0}, - pop: {S: 'Goes the weasel'} - }); - 
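
The #batchDelete tests above assert that deletes are partitioned into BatchWriteItem pages of at most 25 keys and that unprocessed items are retried. A brief usage sketch (hypothetical `Event` class and table name); the batching and retry behaviour happens inside the mapper:

```typescript
import {DataMapper, DynamoDbSchema, DynamoDbTable} from '@aws/dynamodb-data-mapper';
import DynamoDB = require('aws-sdk/clients/dynamodb');

class Event {
    constructor(public id?: number) {}

    get [DynamoDbTable]() { return 'events'; }      // hypothetical table name
    get [DynamoDbSchema]() {
        return {id: {type: 'Number', keyType: 'HASH'}};
    }
}

const mapper = new DataMapper({client: new DynamoDB()});

async function purge(ids: number[]): Promise<void> {
    // The caller just iterates the async iterable of deleted items; chunking
    // into 25-item requests and retrying unprocessed keys is handled internally.
    for await (const deleted of mapper.batchDelete(ids.map(id => new Event(id)))) {
        console.log('deleted', deleted.id);
    }
}
```
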
expected[Math.floor(i / 100)][0].RequestItems.foo.Keys - .push({fizz: {N: String(i)}}); - } - - for (const response of responses) { - promiseFunc.mockImplementationOnce( - () => Promise.resolve(response) - ); - } - - const input = asyncInput - ? async function *() { - for (const item of gets) { - await new Promise(resolve => setTimeout( - resolve, - Math.round(Math.random()) - )); - yield item; - } - }() - : gets; - - for await (const item of mapper.batchGet(input)) { - expect(item).toBeInstanceOf(Item); - expect(item.buzz).toBe(item.fizz % 2 === 0); - expect(item.pop).toBe('Goes the weasel'); - } - - const {calls} = mockDynamoDbClient.batchGetItem.mock; - expect(calls.length).toBe(4); - expect(calls).toEqual(expected); - } - ); - - it('should should retry unprocessed items', async () => { - const failures = new Set(['24', '142', '260']); - - const gets: Array = []; - const expected: any = [ - [{RequestItems: {foo: {Keys: []}}}], - [{RequestItems: {foo: {Keys: []}}}], - [{RequestItems: {foo: {Keys: []}}}], - [{RequestItems: {foo: {Keys: []}}}], - ]; - const responses: any = [ - { - Responses: {foo: []}, - UnprocessedKeys: {foo: {Keys: []}} - }, - { - Responses: {foo: []}, - UnprocessedKeys: {foo: {Keys: []}} - }, - { - Responses: {foo: []}, - UnprocessedKeys: {foo: {Keys: []}} - }, - { - Responses: {foo: []}, - UnprocessedKeys: {foo: {Keys: []}} - }, - ]; - - let currentRequest = 0; - for (let i = 0; i < 325; i++) { - gets.push(new Item(i)); - expected[currentRequest][0].RequestItems.foo.Keys - .push({fizz: {N: String(i)}}); - - const response = { - fizz: {N: String(i)}, - buzz: {BOOL: i % 2 === 0}, - pop: {S: 'Goes the weasel'} - }; - - if (failures.has(String(i))) { - responses[currentRequest].UnprocessedKeys.foo.Keys - .push({fizz: {N: String(i)}}); - responses[currentRequest + 1].Responses.foo.push(response); - } - else { - responses[currentRequest].Responses.foo.push(response); - if (responses[currentRequest].Responses.foo.length === 99) { - currentRequest++; - } - } - } - - for (const response of responses) { - promiseFunc.mockImplementationOnce( - () => Promise.resolve(response) - ); - } - - const input = asyncInput - ? async function *() { - for (const item of gets) { - await new Promise(resolve => setTimeout( - resolve, - Math.round(Math.random()) - )); - yield item; - } - }() - : gets; - - let itemsReturned = 0; - for await (const item of mapper.batchGet(input)) { - expect(item).toBeInstanceOf(Item); - expect(item.buzz).toBe(item.fizz % 2 === 0); - expect(item.pop).toBe('Goes the weasel'); - itemsReturned++; - } - - expect(itemsReturned).toBe(325); - - const {calls} = mockDynamoDbClient.batchGetItem.mock; - const callCount: {[key: string]: number} = (calls as Array>).reduce( - ( - keyUseCount: {[key: string]: number}, - [{RequestItems: {foo: {Keys}}}] - ) => { - for (const {fizz: {N: key}} of (Keys as any)) { - if (key in keyUseCount) { - keyUseCount[key]++; - } else { - keyUseCount[key] = 1; - } - } - - return keyUseCount; - }, - {} - ); - - for (let i = 0; i < 325; i++) { - expect(callCount[i]).toBe(failures.has(String(i)) ? 
2 : 1); - } - }); - } - }); - - describe('#batchPut', () => { - const promiseFunc = jest.fn(() => Promise.resolve({ - UnprocessedItems: {} - })); - const mockDynamoDbClient = { - config: {}, - batchWriteItem: jest.fn(() => ({promise: promiseFunc})), - }; - - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - }); - - let counter = 0; - class Item { - fizz?: number; - - buzz?: Set; - - get [DynamoDbTable](): string { - return 'foo'; - } - - get [DynamoDbSchema](): Schema { - return { - fizz: { - type: 'Number', - keyType: 'HASH', - defaultProvider() { - return counter++; - } - }, - buzz: { - type: 'Set', - memberType: 'String' - } - }; - } - } - - beforeEach(() => { - counter = 0; - promiseFunc.mockClear(); - mockDynamoDbClient.batchWriteItem.mockClear(); - }); - - for (const asyncInput of [true, false]) { - - it( - 'should should partition put batches into requests with 25 or fewer items', - async () => { - const puts: Array = []; - const expected: any = [ - [{RequestItems: {foo: []}}], - [{RequestItems: {foo: []}}], - [{RequestItems: {foo: []}}], - [{RequestItems: {foo: []}}], - ]; - for (let i = 0; i < 80; i++) { - puts.push(new Item()); - expected[Math.floor(i / 25)][0].RequestItems.foo.push({ - PutRequest: { - Item: { - fizz: {N: String(i)} - } - } - }); - } - - const input = asyncInput - ? async function *() { - for (const item of puts) { - await new Promise(resolve => setTimeout( - resolve, - Math.round(Math.random()) - )); - yield item; - } - }() - : puts; - - for await (const item of mapper.batchPut(input)) { - expect(item).toBeInstanceOf(Item); - expect(typeof item.fizz).toBe('number'); - } - - const {calls} = mockDynamoDbClient.batchWriteItem.mock; - expect(calls.length).toBe(4); - expect(calls).toEqual(expected); - } - ); - - it('should should retry unprocessed items', async () => { - const puts: Array = []; - for (let i = 0; i < 80; i++) { - const item = new Item(); - item.buzz = new Set(['foo', 'bar', 'baz']); - puts.push(item); - } - - const failures = new Set(['24', '42', '60']); - for (const failureId of failures) { - const item = { - PutRequest: { - Item: { - fizz: {N: failureId}, - buzz: {SS: ['foo', 'bar', 'baz']} - } - } - }; - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - UnprocessedItems: {foo: [item]} - })); - } - - const input = asyncInput - ? async function *() { - for (const item of puts) { - await new Promise(resolve => setTimeout( - resolve, - Math.round(Math.random()) - )); - yield item; - } - }() - : puts; - - for await (const item of mapper.batchPut(input)) { - expect(item).toBeInstanceOf(Item); - expect(typeof item.fizz).toBe('number'); - expect(item.buzz).toBeInstanceOf(Set); - } - - const {calls} = mockDynamoDbClient.batchWriteItem.mock; - expect(calls.length).toBe(4); - const callCount: {[key: string]: number} = (calls as Array>).reduce( - ( - keyUseCount: {[key: string]: number}, - [{RequestItems: {foo}}] - ) => { - for (const {PutRequest: {Item: {fizz: {N: key}}}} of (foo as any)) { - if (key in keyUseCount) { - keyUseCount[key]++; - } else { - keyUseCount[key] = 1; - } - } - - return keyUseCount; - }, - {} - ); - - for (let i = 0; i < 80; i++) { - expect(callCount[i]).toBe(failures.has(String(i)) ? 
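
The #batchGet tests above exercise overall and per-table read consistency plus per-table projection expressions, with gets partitioned into BatchGetItem pages of at most 100 keys. A sketch of that options shape, with a hypothetical `Account` class and table name:

```typescript
import {BatchGetOptions, DataMapper, DynamoDbSchema, DynamoDbTable} from '@aws/dynamodb-data-mapper';
import DynamoDB = require('aws-sdk/clients/dynamodb');

class Account {
    constructor(public id?: number) {}

    get [DynamoDbTable]() { return 'accounts'; }    // hypothetical table name
    get [DynamoDbSchema]() {
        return {
            id: {type: 'Number', keyType: 'HASH'},
            owner: {type: 'String'},
            balance: {type: 'Number'},
        };
    }
}

const mapper = new DataMapper({client: new DynamoDB()});

async function loadAccounts(ids: number[]): Promise<Account[]> {
    const options: BatchGetOptions = {
        readConsistency: 'eventual',
        perTableOptions: {
            // Table-level overrides, keyed by table name.
            accounts: {readConsistency: 'strong', projection: ['id', 'balance']},
        },
    };

    const found: Account[] = [];
    for await (const account of mapper.batchGet(ids.map(id => new Account(id)), options)) {
        found.push(account);
    }
    return found;
}
```
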
2 : 1); - } - }); - } - }); - - describe('#createGlobalSecondaryIndex', () => { - const waitPromiseFunc = jest.fn(() => Promise.resolve()); - const updateTablePromiseFunc = jest.fn(() => Promise.resolve({})); - const mockDynamoDbClient = { - config: {}, - updateTable: jest.fn(() => ({promise: updateTablePromiseFunc})), - waitFor: jest.fn(() => ({promise: waitPromiseFunc})), - }; - - beforeEach(() => { - updateTablePromiseFunc.mockClear(); - mockDynamoDbClient.updateTable.mockClear(); - waitPromiseFunc.mockClear(); - mockDynamoDbClient.waitFor.mockClear(); - }); - - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - }); - - class Item { - get [DynamoDbTable]() { return 'foo' } - - get [DynamoDbSchema]() { - return { - id: { - type: 'String', - keyType: 'HASH' - }, - description: { - type: 'String', - indexKeyConfigurations: { - DescriptionIndex: 'HASH' - } - } - }; - } - } - - const DescriptionIndex: GlobalSecondaryIndexOptions = { - projection: 'all', - readCapacityUnits: 1, - type: 'global', - writeCapacityUnits: 1 - }; - - it('should make and send an UpdateTable request', async () => { - await mapper.createGlobalSecondaryIndex(Item, 'DescriptionIndex', { - indexOptions: { - DescriptionIndex - }, - readCapacityUnits: 5, - writeCapacityUnits: 5, - }); - - expect(mockDynamoDbClient.updateTable.mock.calls).toEqual([ - [ - { - TableName: 'foo', - AttributeDefinitions: [ - { - AttributeName: 'id', - AttributeType: 'S' - }, - { - AttributeName: 'description', - AttributeType: 'S' - } - ], - GlobalSecondaryIndexUpdates: [ - { - Create: { - IndexName: 'DescriptionIndex', - KeySchema: [ - { - AttributeName: 'description', - KeyType: 'HASH' - } - ], - Projection: { - ProjectionType: 'ALL' - }, - ProvisionedThroughput: { - ReadCapacityUnits: 1, - WriteCapacityUnits: 1 - } - } - } - ], - }, - ] - ]); - - expect(mockDynamoDbClient.waitFor.mock.calls).toEqual([ - [ 'tableExists', { TableName: 'foo' } ], - ]); - }); - }) - - describe('#createTable', () => { - const waitPromiseFunc = jest.fn(() => Promise.resolve()); - const createTablePromiseFunc = jest.fn(() => Promise.resolve({})); - const mockDynamoDbClient = { - config: {}, - createTable: jest.fn(() => ({promise: createTablePromiseFunc})), - waitFor: jest.fn(() => ({promise: waitPromiseFunc})), - }; - - beforeEach(() => { - createTablePromiseFunc.mockClear(); - mockDynamoDbClient.createTable.mockClear(); - waitPromiseFunc.mockClear(); - mockDynamoDbClient.waitFor.mockClear(); - }); - - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - }); - - class Item { - get [DynamoDbTable]() { return 'foo' } - - get [DynamoDbSchema]() { - return { id: { type: 'String', keyType: 'HASH' } }; - } - } - - it('should make and send a CreateTable request', async () => { - await mapper.createTable(Item, { - readCapacityUnits: 5, - writeCapacityUnits: 5, - }); - - expect(mockDynamoDbClient.createTable.mock.calls).toEqual([ - [ - { - TableName: 'foo', - AttributeDefinitions: [ - { - AttributeName: 'id', - AttributeType: 'S' - } - ], - KeySchema: [ - { - AttributeName: 'id', - KeyType: 'HASH', - } - ], - ProvisionedThroughput: { - ReadCapacityUnits: 5, - WriteCapacityUnits: 5, - }, - StreamSpecification: { StreamEnabled: false }, - SSESpecification: { Enabled: false }, - }, - ] - ]); - - expect(mockDynamoDbClient.waitFor.mock.calls).toEqual([ - [ 'tableExists', { TableName: 'foo' } ], - ]); - }); - - it( - 'should forgo invoking the waiter if the table is already active', - async () => { - 
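
The #batchPut tests above rely on a schema-level `defaultProvider` to fill in missing hash keys, and show that puts are chunked and retried like the other batch operations. A sketch of that pattern (hypothetical `Measurement` class, table name, and key generator):

```typescript
import {DataMapper, DynamoDbSchema, DynamoDbTable} from '@aws/dynamodb-data-mapper';
import DynamoDB = require('aws-sdk/clients/dynamodb');
import {randomBytes} from 'crypto';

class Measurement {
    id?: string;
    tags?: Set<string>;

    get [DynamoDbTable]() { return 'measurements'; } // hypothetical table name
    get [DynamoDbSchema]() {
        return {
            id: {
                type: 'String',
                keyType: 'HASH',
                // Called by the mapper when the key is undefined, as exercised
                // by the deleted #batchPut tests.
                defaultProvider: () => randomBytes(8).toString('hex'),
            },
            tags: {type: 'Set', memberType: 'String'},
        };
    }
}

const mapper = new DataMapper({client: new DynamoDB()});

async function record(tagSets: Array<Set<string>>): Promise<void> {
    const items = tagSets.map(tags => Object.assign(new Measurement(), {tags}));
    for await (const saved of mapper.batchPut(items)) {
        console.log('stored', saved.id);            // id filled in by defaultProvider
    }
}
```
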
createTablePromiseFunc.mockImplementationOnce(() => Promise.resolve({ - TableDescription: {TableStatus: 'ACTIVE'} - })); - - await mapper.createTable(Item, { - readCapacityUnits: 5, - writeCapacityUnits: 5, - }); - - expect(mockDynamoDbClient.createTable.mock.calls.length).toBe(1); - - expect(mockDynamoDbClient.waitFor.mock.calls.length).toBe(0); - } - ); - - it('should allow enabling streams', async () => { - await mapper.createTable(Item, { - readCapacityUnits: 5, - streamViewType: 'NEW_AND_OLD_IMAGES', - writeCapacityUnits: 5, - }); - - expect(mockDynamoDbClient.createTable.mock.calls).toEqual([ - [ - { - TableName: 'foo', - AttributeDefinitions: [ - { - AttributeName: 'id', - AttributeType: 'S' - } - ], - KeySchema: [ - { - AttributeName: 'id', - KeyType: 'HASH', - } - ], - ProvisionedThroughput: { - ReadCapacityUnits: 5, - WriteCapacityUnits: 5, - }, - StreamSpecification: { - StreamEnabled: true, - StreamViewType: 'NEW_AND_OLD_IMAGES' - }, - SSESpecification: { Enabled: false }, - }, - ] - ]); - }); - - it('should create new table with on-demand capacity mode', async () => { - await mapper.createTable(Item, { - billingMode: 'PAY_PER_REQUEST', - }); - - expect(mockDynamoDbClient.createTable.mock.calls).toEqual([ - [ - { - TableName: 'foo', - AttributeDefinitions: [ - { - AttributeName: 'id', - AttributeType: 'S' - } - ], - KeySchema: [ - { - AttributeName: 'id', - KeyType: 'HASH', - } - ], - BillingMode: 'PAY_PER_REQUEST', - StreamSpecification: { StreamEnabled: false }, - SSESpecification: { Enabled: false }, - }, - ] - ]); - }); - - it('should allow enabling sse using AWS managed CMK', async () => { - await mapper.createTable(Item, { - readCapacityUnits: 5, - writeCapacityUnits: 5, - sseSpecification: { - sseType: 'KMS', - }, - }); - - expect(mockDynamoDbClient.createTable.mock.calls).toEqual([ - [ - { - TableName: 'foo', - AttributeDefinitions: [ - { - AttributeName: 'id', - AttributeType: 'S' - } - ], - KeySchema: [ - { - AttributeName: 'id', - KeyType: 'HASH', - } - ], - ProvisionedThroughput: { - ReadCapacityUnits: 5, - WriteCapacityUnits: 5, - }, - StreamSpecification: { StreamEnabled: false }, - SSESpecification: { - Enabled: true, - SSEType: 'KMS', - }, - }, - ] - ]); - }); - - describe('index keys', () => { - class IndexedItem { - get [DynamoDbTable]() { return 'foo' } - - get [DynamoDbSchema]() { - return { - partitionKey: { - type: 'Number', - keyType: 'HASH', - }, - createdAt: { - type: 'Date', - keyType: 'RANGE', - indexKeyConfigurations: { - chronological: 'HASH', - globalIndex: 'RANGE' - }, - attributeName: 'timestamp' - }, - createdBy: { - type: 'String', - indexKeyConfigurations: { - globalIndex: 'HASH', - localIndex: 'RANGE' - }, - attributeName: 'creator', - }, - binaryKey: { - type: 'Binary', - indexKeyConfigurations: { - binaryIndex: 'HASH' - } - }, - customKey: { - type: 'Custom', - attributeType: 'S', - marshall: (str: string) => str, - unmarshall: (av: any) => av.S, - indexKeyConfigurations: { - binaryIndex: 'RANGE', - }, - }, - listProp: { type: 'Collection' }, - }; - } - } - - it('should identify and report index keys', async () => { - await mapper.createTable(IndexedItem, { - readCapacityUnits: 5, - writeCapacityUnits: 5, - indexOptions: { - binaryIndex: { - type: 'global', - readCapacityUnits: 2, - writeCapacityUnits: 3, - projection: ['createdBy', 'createdAt'], - }, - chronological: { - type: 'global', - readCapacityUnits: 5, - writeCapacityUnits: 5, - projection: 'all', - }, - globalIndex: { - type: 'global', - readCapacityUnits: 6, - writeCapacityUnits: 
7, - projection: 'all', - }, - localIndex: { - type: 'local', - projection: 'keys', - }, - } - }); - - expect(mockDynamoDbClient.createTable.mock.calls).toEqual([ - [ - { - AttributeDefinitions: [ - { - AttributeName: 'partitionKey', - AttributeType: 'N' - }, - { - AttributeName: 'timestamp', - AttributeType: 'N' - }, - { - AttributeName: 'creator', - AttributeType: 'S' - }, - { - AttributeName: 'binaryKey', - AttributeType: 'B' - }, - { - AttributeName: 'customKey', - AttributeType: 'S' - }, - ], - GlobalSecondaryIndexes: [ - { - IndexName: 'chronological', - KeySchema: [ - { - AttributeName: 'timestamp', - KeyType: 'HASH', - }, - ], - Projection: { ProjectionType: 'ALL' }, - ProvisionedThroughput: { - ReadCapacityUnits: 5, - WriteCapacityUnits: 5, - }, - }, - { - IndexName: 'globalIndex', - KeySchema: [ - { - AttributeName: 'creator', - KeyType: 'HASH', - }, - { - AttributeName: 'timestamp', - KeyType: 'RANGE', - }, - ], - Projection: { ProjectionType: 'ALL' }, - ProvisionedThroughput: { - ReadCapacityUnits: 6, - WriteCapacityUnits: 7, - }, - }, - { - IndexName: 'binaryIndex', - KeySchema: [ - { - AttributeName: 'binaryKey', - KeyType: 'HASH', - }, - { - AttributeName: 'customKey', - KeyType: 'RANGE', - }, - ], - Projection: { - ProjectionType: 'INCLUDE', - NonKeyAttributes: [ - 'creator', - 'timestamp', - ], - }, - ProvisionedThroughput: { - ReadCapacityUnits: 2, - WriteCapacityUnits: 3, - }, - }, - ], - KeySchema: [ - { - AttributeName: 'partitionKey', - KeyType: 'HASH', - }, - { - AttributeName: 'timestamp', - KeyType: 'RANGE', - }, - ], - LocalSecondaryIndexes: [ - { - IndexName: 'localIndex', - KeySchema: [ - { - AttributeName: 'creator', - KeyType: 'RANGE', - }, - ], - Projection: { ProjectionType: 'KEYS_ONLY' }, - }, - ], - ProvisionedThroughput: { - ReadCapacityUnits: 5, - WriteCapacityUnits: 5, - }, - StreamSpecification: { StreamEnabled: false }, - SSESpecification: { Enabled: false }, - TableName: 'foo', - }, - ], - ]); - }); - - it('should identify and report index keys with on-demand capacity mode', async () => { - await mapper.createTable(IndexedItem, { - billingMode: 'PAY_PER_REQUEST', - indexOptions: { - binaryIndex: { - type: 'global', - projection: ['createdBy', 'createdAt'], - }, - chronological: { - type: 'global', - projection: 'all', - }, - globalIndex: { - type: 'global', - projection: 'all', - }, - localIndex: { - type: 'local', - projection: 'keys', - }, - } - }); - - expect(mockDynamoDbClient.createTable.mock.calls).toEqual([ - [ - { - AttributeDefinitions: [ - { - AttributeName: 'partitionKey', - AttributeType: 'N' - }, - { - AttributeName: 'timestamp', - AttributeType: 'N' - }, - { - AttributeName: 'creator', - AttributeType: 'S' - }, - { - AttributeName: 'binaryKey', - AttributeType: 'B' - }, - { - AttributeName: 'customKey', - AttributeType: 'S' - }, - ], - GlobalSecondaryIndexes: [ - { - IndexName: 'chronological', - KeySchema: [ - { - AttributeName: 'timestamp', - KeyType: 'HASH', - }, - ], - Projection: { ProjectionType: 'ALL' }, - }, - { - IndexName: 'globalIndex', - KeySchema: [ - { - AttributeName: 'creator', - KeyType: 'HASH', - }, - { - AttributeName: 'timestamp', - KeyType: 'RANGE', - }, - ], - Projection: { ProjectionType: 'ALL' }, - }, - { - IndexName: 'binaryIndex', - KeySchema: [ - { - AttributeName: 'binaryKey', - KeyType: 'HASH', - }, - { - AttributeName: 'customKey', - KeyType: 'RANGE', - }, - ], - Projection: { - ProjectionType: 'INCLUDE', - NonKeyAttributes: [ - 'creator', - 'timestamp', - ], - }, - }, - ], - KeySchema: [ - { - 
AttributeName: 'partitionKey', - KeyType: 'HASH', - }, - { - AttributeName: 'timestamp', - KeyType: 'RANGE', - }, - ], - LocalSecondaryIndexes: [ - { - IndexName: 'localIndex', - KeySchema: [ - { - AttributeName: 'creator', - KeyType: 'RANGE', - }, - ], - Projection: { ProjectionType: 'KEYS_ONLY' }, - }, - ], - BillingMode: 'PAY_PER_REQUEST', - StreamSpecification: { StreamEnabled: false }, - SSESpecification: { Enabled: false }, - TableName: 'foo', - }, - ], - ]); - }); - - it( - 'should throw if no options were provided for a modeled index', - async () => { - const options = { - readCapacityUnits: 5, - writeCapacityUnits: 5, - }; - - await expect(mapper.createTable(IndexedItem, options)) - .rejects - .toMatchObject(new Error( - 'No options provided for chronological index' - )); - } - ); - }); - }); - - describe('#delete', () => { - const promiseFunc = jest.fn(() => Promise.resolve({Attributes: {}})); - const mockDynamoDbClient = { - config: {}, - deleteItem: jest.fn(() => ({promise: promiseFunc})), - }; - - beforeEach(() => { - promiseFunc.mockClear(); - mockDynamoDbClient.deleteItem.mockClear(); - }); - - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - }); - - it( - 'should throw if the item does not provide a schema per the data mapper protocol', - async () => { - await expect(mapper.delete({ - [DynamoDbTable]: 'foo', - })).rejects.toMatchObject(new Error( - 'The provided item did not adhere to the DynamoDbDocument protocol. No object property was found at the `DynamoDbSchema` symbol' - )); - } - ); - - it( - 'should throw if the item does not provide a table name per the data mapper protocol', - async () => { - await expect(mapper.delete({ - [DynamoDbSchema]: {}, - })).rejects.toMatchObject(new Error( - 'The provided item did not adhere to the DynamoDbTable protocol. 
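
The #createTable tests above cover provisioned and on-demand billing, stream and SSE specifications, and index definitions derived from `indexKeyConfigurations` in the schema; every modeled index has to be described in `indexOptions` or the call rejects. A sketch combining a few of those options for illustration, with a hypothetical `Comment` class, table, and index name:

```typescript
import {DataMapper, DynamoDbSchema, DynamoDbTable} from '@aws/dynamodb-data-mapper';
import DynamoDB = require('aws-sdk/clients/dynamodb');

class Comment {
    get [DynamoDbTable]() { return 'comments'; }    // hypothetical table name
    get [DynamoDbSchema]() {
        return {
            postId: {type: 'Number', keyType: 'HASH'},
            createdAt: {
                type: 'Date',
                keyType: 'RANGE',
                indexKeyConfigurations: {ByAuthor: 'RANGE'},
            },
            author: {
                type: 'String',
                indexKeyConfigurations: {ByAuthor: 'HASH'},
            },
        };
    }
}

const mapper = new DataMapper({client: new DynamoDB()});

async function provision(): Promise<void> {
    await mapper.createTable(Comment, {
        billingMode: 'PAY_PER_REQUEST',
        streamViewType: 'NEW_AND_OLD_IMAGES',
        indexOptions: {
            // Without this entry the mapper would reject with
            // "No options provided for ByAuthor index".
            ByAuthor: {type: 'global', projection: 'all'},
        },
    });
}
```
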
No string property was found at the `DynamoDbTable` symbol' - )); - } - ); - - it( - 'should use the table name specified in the supplied table definition', - async () => { - const tableName = 'foo'; - await mapper.delete({ - [DynamoDbTable]: tableName, - [DynamoDbSchema]: {}, - }); - - expect((mockDynamoDbClient.deleteItem.mock.calls[0] as any)[0]) - .toMatchObject({TableName: tableName}); - } - ); - - it( - 'should apply a table name prefix provided to the mapper constructor', - async () => { - const tableNamePrefix = 'INTEG_'; - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - tableNamePrefix, - }); - const tableName = 'foo'; - await mapper.delete({ - [DynamoDbTable]: tableName, - [DynamoDbSchema]: {}, - }); - - expect((mockDynamoDbClient.deleteItem.mock.calls[0] as any)[0]) - .toMatchObject({TableName: tableNamePrefix + tableName}); - } - ); - - it( - 'should marshall the supplied key according to the schema', - async () => { - await mapper.delete({ - fizz: 'buzz', - pop: new Date(60000), - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - keyType: 'HASH', - }, - pop: { - type: 'Date', - keyType: 'RANGE' - }, - }, - }); - - expect((mockDynamoDbClient.deleteItem.mock.calls[0] as any)[0]) - .toMatchObject({ - Key: { - fizz: {S: 'buzz'}, - pop: {N: '60'}, - } - }); - } - ); - - it( - 'should ignore non-key fields when marshalling the key', - async () => { - await mapper.delete({ - fizz: 'buzz', - pop: new Date(60000), - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - keyType: 'HASH', - }, - pop: { - type: 'Date' - }, - }, - }); - - expect((mockDynamoDbClient.deleteItem.mock.calls[0] as any)[0]) - .toMatchObject({ - Key: {fizz: {S: 'buzz'}} - }); - } - ); - - it( - 'should apply attribute names when marshalling the key', - async () => { - await mapper.delete({ - fizz: 'buzz', - pop: new Date(60000), - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Date' - }, - }, - }); - - expect((mockDynamoDbClient.deleteItem.mock.calls[0] as any)[0]) - .toMatchObject({ - Key: {foo: {S: 'buzz'}} - }); - } - ); - - it( - 'should include a condition expression when the schema contains a version attribute', - async () => { - await mapper.delete({ - fizz: 'buzz', - pop: 21, - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Number', - versionAttribute: true, - }, - }, - }); - - expect((mockDynamoDbClient.deleteItem.mock.calls[0] as any)[0]) - .toMatchObject({ - ConditionExpression: '#attr0 = :val1', - ExpressionAttributeNames: {'#attr0': 'pop'}, - ExpressionAttributeValues: {':val1': {N: '21'}}, - }); - } - ); - - it( - 'should not include a condition expression when the schema contains a version attribute but the value is undefined', - async () => { - await mapper.delete({ - fizz: 'buzz', - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Number', - versionAttribute: true, - }, - }, - }); - - expect((mockDynamoDbClient.deleteItem.mock.calls[0] as any)[0]) - .not.toHaveProperty('ConditionExpression'); - } - ); - - it( - 'should not include a condition expression when the skipVersionCheck input parameter is true', - async () => { - await mapper.delete( - { - fizz: 'buzz', - pop: 21, - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - 
attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Number', - versionAttribute: true, - }, - }, - }, - {skipVersionCheck: true}, - ); - - expect((mockDynamoDbClient.deleteItem.mock.calls[0] as any)[0]) - .not.toHaveProperty('ConditionExpression'); - } - ); - - it( - `should not include a condition expression when the mapper's default skipVersionCheck input parameter is true`, - async () => { - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - skipVersionCheck: true - }); - await mapper.delete({ - fizz: 'buzz', - pop: 21, - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Number', - versionAttribute: true, - }, - }, - }); - - expect((mockDynamoDbClient.deleteItem.mock.calls[0] as any)[0]) - .not.toHaveProperty('ConditionExpression'); - } - ); - - it( - 'should combine the version condition with any other condition expression', - async () => { - await mapper.delete( - { - fizz: 'buzz', - pop: 21, - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Number', - versionAttribute: true, - }, - quux: {type: 'Date'}, - }, - }, - { - condition: { - type: 'LessThan', - subject: 'quux', - object: 600000 - } - } - ); - - expect((mockDynamoDbClient.deleteItem.mock.calls[0] as any)[0]) - .toMatchObject({ - ConditionExpression: '(#attr0 < :val1) AND (#attr2 = :val3)', - ExpressionAttributeNames: { - '#attr0': 'quux', - '#attr2': 'pop', - }, - ExpressionAttributeValues: { - ':val1': {N: '600000'}, - ':val3': {N: '21'} - }, - }); - } - ); - - it( - 'should not include ExpressionAttributeValues when a substitution has not been made', - async () => { - await mapper.delete( - { - fizz: 'buzz', - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - attributeName: 'bar', - keyType: 'HASH', - } - }, - }, - { - condition: new FunctionExpression( - 'attribute_not_exists', - new AttributePath('fizz') - ) - } - ); - - expect((mockDynamoDbClient.deleteItem.mock.calls[0] as any)[0]) - .toEqual({ - ConditionExpression: 'attribute_not_exists(#attr0)', - ExpressionAttributeNames: { - '#attr0': 'bar', - }, - TableName: 'foo', - Key: { - bar: { S: 'buzz' } - }, - ReturnValues: 'ALL_OLD' - }); - } - ); - - it('should unmarshall any returned attributes', async () => { - promiseFunc.mockImplementation(() => Promise.resolve({Attributes: { - fizz: {S: 'buzz'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }})); - - const result = await mapper.delete( - { - foo: 'buzz', - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - foo: { - type: 'String', - attributeName: 'fizz', - keyType: 'HASH', - }, - bar: { - type: 'Set', - memberType: 'Number' - }, - baz: { - type: 'Tuple', - members: [{type: 'Boolean'}, {type: 'Number'}] - }, - }, - }, - {returnValues: "ALL_OLD"} - ); - - expect(result).toEqual({ - foo: 'buzz', - bar: new Set([1, 2, 3]), - baz: [true, 4], - }); - }); - - it('should support the legacy call pattern', async () => { - await mapper.delete({ - item: { - fizz: 'buzz', - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Number', - versionAttribute: true, - }, - }, - } - }); - }); - - it('should return instances of the correct class', async () => { - promiseFunc.mockImplementation(() => Promise.resolve({Attributes: { - fizz: {S: 'buzz'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: 
[{BOOL: true}, {N: '4'}]} - }})); - - class Item { - foo?: string; - - constructor(foo?: string) { - this.foo = foo; - } - - get [DynamoDbTable]() { - return 'foo' - } - - get [DynamoDbSchema]() { - return { - foo: { - type: 'String', - attributeName: 'fizz', - keyType: 'HASH', - }, - bar: { - type: 'Set', - memberType: 'Number' - }, - baz: { - type: 'Tuple', - members: [{type: 'Boolean'}, {type: 'Number'}] - }, - } - } - } - - const result = await mapper.delete( - new Item('buzz'), - {returnValues: "ALL_OLD"} - ); - - expect(result).toEqual({ - foo: 'buzz', - bar: new Set([1, 2, 3]), - baz: [true, 4], - }); - }); - }); - - describe('#deleteTable', () => { - const waitPromiseFunc = jest.fn(() => Promise.resolve()); - const deleteTablePromiseFunc = jest.fn(() => Promise.resolve({})); - const mockDynamoDbClient = { - config: {}, - deleteTable: jest.fn(() => ({promise: deleteTablePromiseFunc})), - waitFor: jest.fn(() => ({promise: waitPromiseFunc})), - }; - - beforeEach(() => { - deleteTablePromiseFunc.mockClear(); - mockDynamoDbClient.deleteTable.mockClear(); - waitPromiseFunc.mockClear(); - mockDynamoDbClient.waitFor.mockClear(); - }); - - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - }); - - class Item { - get [DynamoDbTable]() { return 'foo' } - - get [DynamoDbSchema]() { - return { id: { type: 'String', keyType: 'HASH' } }; - } - } - - it( - 'should make and send a DeleteTable request and wait for it to take effect', - async () => { - await mapper.deleteTable(Item); - - expect(mockDynamoDbClient.deleteTable.mock.calls).toEqual([ - [ { TableName: 'foo' } ], - ]); - - expect(mockDynamoDbClient.waitFor.mock.calls).toEqual([ - [ 'tableNotExists', { TableName: 'foo' } ], - ]); - }); - }); - - - describe('#ensureGlobalSecondaryIndexExists', () => { - const waitPromiseFunc = jest.fn(() => Promise.resolve()); - const describeTablePromiseFunc = jest.fn(() => Promise.resolve({ - Table: { - TableStatus: 'ACTIVE', - GlobalSecondaryIndexes: [ - { - IndexName: 'DescriptionIndex' - } - ], - } - } as DescribeTableOutput)); - const mockDynamoDbClient = { - config: {}, - describeTable: jest.fn(() => ({promise: describeTablePromiseFunc})), - waitFor: jest.fn(() => ({promise: waitPromiseFunc})), - }; - - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - }); - mapper.createGlobalSecondaryIndex = jest.fn(() => Promise.resolve()); - - beforeEach(() => { - (mapper.createGlobalSecondaryIndex as any).mockClear(); - mockDynamoDbClient.describeTable.mockClear(); - waitPromiseFunc.mockClear(); - mockDynamoDbClient.waitFor.mockClear(); - }); - - let tableName = 'foo'; - let schema = { - id: { - type: 'String', - keyType: 'HASH' - }, - description: { - type: 'String', - indexKeyConfigurations: { - DescriptionIndex: 'HASH' - } - } - }; - - class Item { - get [DynamoDbTable]() { return tableName } - - get [DynamoDbSchema]() { return schema; } - } - - const DescriptionIndex: GlobalSecondaryIndexOptions = { - projection: 'all', - readCapacityUnits: 1, - type: 'global', - writeCapacityUnits: 1 - }; - - it( - 'should resolve immediately if the table exists, is active, and the GSI already exists', - async () => { - await mapper.ensureGlobalSecondaryIndexExists(Item, 'DescriptionIndex', { - indexOptions: { - DescriptionIndex - }, - readCapacityUnits: 5, - writeCapacityUnits: 5, - }); - - expect(mockDynamoDbClient.describeTable.mock.calls).toEqual([ - [{ TableName: tableName }] - ]); - - expect(mockDynamoDbClient.waitFor.mock.calls.length).toBe(0); - 
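
The #delete tests above show optimistic locking through a `versionAttribute`, how that check is combined with caller-supplied conditions, how it can be bypassed with `skipVersionCheck`, and how `returnValues: 'ALL_OLD'` unmarshalls the removed attributes. A sketch under those assumptions (hypothetical `Doc` class and table name):

```typescript
import {DataMapper, DynamoDbSchema, DynamoDbTable} from '@aws/dynamodb-data-mapper';
import {AttributePath, FunctionExpression} from '@aws/dynamodb-expressions';
import DynamoDB = require('aws-sdk/clients/dynamodb');

class Doc {
    id?: string;
    version?: number;

    get [DynamoDbTable]() { return 'docs'; }        // hypothetical table name
    get [DynamoDbSchema]() {
        return {
            id: {type: 'String', keyType: 'HASH'},
            // Folded into a ConditionExpression automatically unless
            // skipVersionCheck is set on the call or on the mapper.
            version: {type: 'Number', versionAttribute: true},
        };
    }
}

const mapper = new DataMapper({client: new DynamoDB()});

async function remove(doc: Doc) {
    // The version check is ANDed with this caller-supplied condition.
    return mapper.delete(doc, {
        condition: new FunctionExpression('attribute_exists', new AttributePath('id')),
        returnValues: 'ALL_OLD',
    });
}
```
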
expect((mapper.createGlobalSecondaryIndex as any).mock.calls.length).toBe(0); - } - ); - - it( - 'should attempt to create the index if the table exists in the ACTIVE state but the specified index does not exist', - async () => { - describeTablePromiseFunc.mockImplementationOnce(() => Promise.resolve({ - Table: { TableStatus: 'ACTIVE' } - } as DescribeTableOutput)); - await mapper.ensureGlobalSecondaryIndexExists(Item, 'DescriptionIndex', { - indexOptions: { - DescriptionIndex - }, - readCapacityUnits: 5, - writeCapacityUnits: 5, - }); - - expect(mockDynamoDbClient.describeTable.mock.calls).toEqual([ - [{ TableName: tableName }] - ]); - - expect((mapper.createGlobalSecondaryIndex as any).mock.calls.length).toBe(1); - expect(mockDynamoDbClient.waitFor.mock.calls.length).toBe(0); - } - ); - - it( - 'should rethrow if "describeTable" throws a "ResourceNotFoundException"', - async () => { - const expectedError = new Error('No such table!'); - expectedError.name = 'ResourceNotFoundException'; - describeTablePromiseFunc.mockImplementationOnce(async () => { - throw expectedError; - }); - - await expect(mapper.ensureGlobalSecondaryIndexExists(Item, 'DescriptionIndex', { - indexOptions: { - DescriptionIndex - }, - readCapacityUnits: 5, - writeCapacityUnits: 5, - })) - .rejects - .toMatchObject(expectedError); - - expect(mockDynamoDbClient.describeTable.mock.calls).toEqual([ - [{ TableName: tableName }] - ]); - - expect(mockDynamoDbClient.waitFor.mock.calls.length).toBe(0); - } - ); - }); - - describe('#ensureTableExists', () => { - const waitPromiseFunc = jest.fn(() => Promise.resolve()); - const describeTablePromiseFunc = jest.fn(() => Promise.resolve({ - Table: { TableStatus: 'ACTIVE' } - } as DescribeTableOutput)); - const mockDynamoDbClient = { - config: {}, - describeTable: jest.fn(() => ({promise: describeTablePromiseFunc})), - waitFor: jest.fn(() => ({promise: waitPromiseFunc})), - }; - - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - }); - mapper.createTable = jest.fn(() => Promise.resolve()); - - beforeEach(() => { - (mapper.createTable as any).mockClear(); - mockDynamoDbClient.describeTable.mockClear(); - waitPromiseFunc.mockClear(); - mockDynamoDbClient.waitFor.mockClear(); - }); - - let tableName = 'foo'; - let schema = { - id: { type: 'String', keyType: 'HASH' } - }; - - class Item { - get [DynamoDbTable]() { return tableName } - - get [DynamoDbSchema]() { return schema; } - } - - it( - 'should resolve immediately if the table exists and is active', - async () => { - await mapper.ensureTableExists(Item, { - readCapacityUnits: 5, - writeCapacityUnits: 5, - }); - - expect(mockDynamoDbClient.describeTable.mock.calls).toEqual([ - [{ TableName: tableName }] - ]); - - expect(mockDynamoDbClient.waitFor.mock.calls.length).toBe(0); - expect((mapper.createTable as any).mock.calls.length).toBe(0); - } - ); - - it( - 'should wait for the table to exist if its state is not "ACTIVE"', - async () => { - describeTablePromiseFunc.mockImplementationOnce(() => Promise.resolve({ - Table: { TableStatus: 'CREATING' } - })) - await mapper.ensureTableExists(Item, { - readCapacityUnits: 5, - writeCapacityUnits: 5, - }); - - expect(mockDynamoDbClient.describeTable.mock.calls).toEqual([ - [{ TableName: tableName }] - ]); - - expect(mockDynamoDbClient.waitFor.mock.calls.length).toBe(1); - expect((mapper.createTable as any).mock.calls.length).toBe(0); - } - ); - - it( - 'should attempt to create the table if "describeTable" throws a "ResourceNotFoundException"', - async () => { - 
describeTablePromiseFunc.mockImplementationOnce(async () => { - const err = new Error('No such table!'); - err.name = 'ResourceNotFoundException'; - throw err; - }); - - const options = { readCapacityUnits: 5, writeCapacityUnits: 5 }; - await mapper.ensureTableExists(Item, options); - - expect(mockDynamoDbClient.describeTable.mock.calls).toEqual([ - [{ TableName: tableName }] - ]); - - expect((mapper.createTable as any).mock.calls).toEqual([ - [Item, options], - ]); - - expect(mockDynamoDbClient.waitFor.mock.calls.length).toBe(0); - } - ); - - it( - 'should rethrow any service exception other than "ResourceNotFoundException"', - async () => { - describeTablePromiseFunc.mockImplementationOnce( - () => Promise.reject(new Error('PANIC')) - ); - - const options = { readCapacityUnits: 5, writeCapacityUnits: 5 }; - - await expect(mapper.ensureTableExists(Item, options)) - .rejects - .toMatchObject(new Error('PANIC')); - - expect(mockDynamoDbClient.describeTable.mock.calls).toEqual([ - [{ TableName: tableName }] - ]); - - expect((mapper.createTable as any).mock.calls.length).toBe(0); - expect(mockDynamoDbClient.waitFor.mock.calls.length).toBe(0); - } - ); - }); - - describe('#ensureTableNotExists', () => { - const waitPromiseFunc = jest.fn(() => Promise.resolve()); - const describeTablePromiseFunc = jest.fn(() => Promise.resolve({})); - const mockDynamoDbClient = { - config: {}, - describeTable: jest.fn(() => ({promise: describeTablePromiseFunc})), - waitFor: jest.fn(() => ({promise: waitPromiseFunc})), - }; - - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - }); - mapper.deleteTable = jest.fn(() => Promise.resolve()); - - beforeEach(() => { - (mapper.deleteTable as any).mockClear(); - mockDynamoDbClient.describeTable.mockClear(); - waitPromiseFunc.mockClear(); - mockDynamoDbClient.waitFor.mockClear(); - }); - - let tableName = 'foo'; - let schema = { - id: { type: 'String', keyType: 'HASH' } - }; - - class Item { - get [DynamoDbTable]() { return tableName } - - get [DynamoDbSchema]() { return schema; } - } - - it( - 'should resolve immediately if the table does not exist', - async () => { - describeTablePromiseFunc.mockImplementationOnce(async () => { - const err = new Error('No such table!'); - err.name = 'ResourceNotFoundException'; - throw err; - }); - - await mapper.ensureTableNotExists(Item); - - expect(mockDynamoDbClient.describeTable.mock.calls).toEqual([ - [{ TableName: tableName }] - ]); - - expect(mockDynamoDbClient.waitFor.mock.calls.length).toBe(0); - expect((mapper.deleteTable as any).mock.calls.length).toBe(0); - } - ); - - it( - 'should wait for the table not to exist if its state is not "DELETING"', - async () => { - describeTablePromiseFunc.mockImplementationOnce(() => Promise.resolve({ - Table: { TableStatus: 'DELETING' } - })) - await mapper.ensureTableNotExists(Item); - - expect(mockDynamoDbClient.describeTable.mock.calls).toEqual([ - [{ TableName: tableName }] - ]); - - expect(mockDynamoDbClient.waitFor.mock.calls).toEqual([ - [ 'tableNotExists', { TableName: tableName } ], - ]); - expect((mapper.deleteTable as any).mock.calls.length).toBe(0); - } - ); - - it('should delete the table if its state is "ACTIVE"', async () => { - describeTablePromiseFunc.mockImplementationOnce(() => Promise.resolve({ - Table: { TableStatus: 'ACTIVE' } - })) - await mapper.ensureTableNotExists(Item); - - expect(mockDynamoDbClient.describeTable.mock.calls).toEqual([ - [{ TableName: tableName }] - ]); - - expect(mockDynamoDbClient.waitFor.mock.calls.length).toBe(0); - 
expect((mapper.deleteTable as any).mock.calls.length).toBe(1); - }); - - it( - 'should wait for the table to exist if its state is "CREATING", then delete it', - async () => { - describeTablePromiseFunc.mockImplementationOnce(() => Promise.resolve({ - Table: { TableStatus: 'CREATING' } - })) - await mapper.ensureTableNotExists(Item); - - expect(mockDynamoDbClient.describeTable.mock.calls).toEqual([ - [{ TableName: tableName }] - ]); - - expect(mockDynamoDbClient.waitFor.mock.calls).toEqual([ - [ 'tableExists', { TableName: tableName } ], - ]); - expect((mapper.deleteTable as any).mock.calls.length).toBe(1); - } - ); - - it( - 'should wait for the table to exist if its state is "UPDATING", then delete it', - async () => { - describeTablePromiseFunc.mockImplementationOnce(() => Promise.resolve({ - Table: { TableStatus: 'UPDATING' } - })) - await mapper.ensureTableNotExists(Item); - - expect(mockDynamoDbClient.describeTable.mock.calls).toEqual([ - [{ TableName: tableName }] - ]); - - expect(mockDynamoDbClient.waitFor.mock.calls).toEqual([ - [ 'tableExists', { TableName: tableName } ], - ]); - expect((mapper.deleteTable as any).mock.calls.length).toBe(1); - } - ); - - it( - 'should rethrow any service exception other than "ResourceNotFoundException"', - async () => { - describeTablePromiseFunc.mockImplementationOnce( - () => Promise.reject(new Error('PANIC')) - ); - - await expect(mapper.ensureTableNotExists(Item)) - .rejects - .toMatchObject(new Error('PANIC')); - - expect(mockDynamoDbClient.describeTable.mock.calls).toEqual([ - [{ TableName: tableName }] - ]); - - expect((mapper.deleteTable as any).mock.calls.length).toBe(0); - expect(mockDynamoDbClient.waitFor.mock.calls.length).toBe(0); - } - ); - }); - - describe('#get', () => { - const promiseFunc = jest.fn(() => Promise.resolve({Item: {}} as GetItemOutput)); - const mockDynamoDbClient = { - config: {}, - getItem: jest.fn(() => ({promise: promiseFunc})), - }; - - beforeEach(() => { - promiseFunc.mockClear(); - mockDynamoDbClient.getItem.mockClear(); - }); - - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - }); - - it( - 'should throw if the item does not provide a schema per the data mapper protocol', - async () => { - await expect(mapper.get({ - [DynamoDbTable]: 'foo', - })).rejects.toMatchObject(new Error( - 'The provided item did not adhere to the DynamoDbDocument protocol. No object property was found at the `DynamoDbSchema` symbol' - )); - } - ); - - it( - 'should throw if the item does not provide a table name per the data mapper protocol', - async () => { - await expect(mapper.get({ - [DynamoDbSchema]: {}, - })).rejects.toMatchObject(new Error( - 'The provided item did not adhere to the DynamoDbTable protocol. 
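
The ensure* tests above describe idempotent provisioning: each call describes the table first, resolves immediately when the resource is already in the desired state, creates or deletes it when it is not, and waits out transitional states. A usage sketch with a hypothetical `Session` class, table, and index name:

```typescript
import {DataMapper, DynamoDbSchema, DynamoDbTable} from '@aws/dynamodb-data-mapper';
import DynamoDB = require('aws-sdk/clients/dynamodb');

class Session {
    get [DynamoDbTable]() { return 'sessions'; }    // hypothetical table name
    get [DynamoDbSchema]() {
        return {
            id: {type: 'String', keyType: 'HASH'},
            userId: {
                type: 'String',
                indexKeyConfigurations: {ByUser: 'HASH'},
            },
        };
    }
}

const mapper = new DataMapper({client: new DynamoDB()});

const tableOptions = {
    readCapacityUnits: 5,
    writeCapacityUnits: 5,
    indexOptions: {
        ByUser: {
            type: 'global' as const,
            projection: 'all' as const,
            readCapacityUnits: 1,
            writeCapacityUnits: 1,
        },
    },
};

async function setUp(): Promise<void> {
    // No-ops when the table/index already exists and is ACTIVE.
    await mapper.ensureTableExists(Session, tableOptions);
    await mapper.ensureGlobalSecondaryIndexExists(Session, 'ByUser', tableOptions);
}

async function tearDown(): Promise<void> {
    await mapper.ensureTableNotExists(Session);
}
```
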
No string property was found at the `DynamoDbTable` symbol' - )); - } - ); - - it( - 'should use the table name specified in the supplied table definition', - async () => { - const tableName = 'foo'; - await mapper.get({ - [DynamoDbTable]: tableName, - [DynamoDbSchema]: {}, - }); - - expect((mockDynamoDbClient.getItem.mock.calls[0] as any)[0]) - .toMatchObject({TableName: tableName}); - } - ); - - it( - 'should apply a table name prefix provided to the mapper constructor', - async () => { - const tableNamePrefix = 'INTEG_'; - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - tableNamePrefix, - }); - const tableName = 'foo'; - await mapper.get({ - [DynamoDbTable]: tableName, - [DynamoDbSchema]: {}, - }); - - expect((mockDynamoDbClient.getItem.mock.calls[0] as any)[0]) - .toMatchObject({TableName: tableNamePrefix + tableName}); - } - ); - - it( - 'should marshall the supplied key according to the schema', - async () => { - await mapper.get({ - fizz: 'buzz', - pop: new Date(60000), - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - keyType: 'HASH', - }, - pop: { - type: 'Date', - keyType: 'RANGE' - }, - }, - }); - - expect((mockDynamoDbClient.getItem.mock.calls[0] as any)[0]) - .toMatchObject({ - Key: { - fizz: {S: 'buzz'}, - pop: {N: '60'}, - } - }); - } - ); - - it( - 'should ignore non-key fields when marshalling the key', - async () => { - await mapper.get({ - fizz: 'buzz', - pop: new Date(60000), - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - keyType: 'HASH', - }, - pop: { - type: 'Date' - }, - }, - }); - - expect((mockDynamoDbClient.getItem.mock.calls[0] as any)[0]) - .toMatchObject({ - Key: {fizz: {S: 'buzz'}} - }); - } - ); - - it( - 'should apply attribute names when marshalling the key', - async () => { - await mapper.get({ - fizz: 'buzz', - pop: new Date(60000), - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Date' - }, - }, - }); - - expect((mockDynamoDbClient.getItem.mock.calls[0] as any)[0]) - .toMatchObject({ - Key: {foo: {S: 'buzz'}} - }); - } - ); - - it( - 'should request a consistent read if the readConsistency is StronglyConsistent', - async () => { - await mapper.get( - { - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: {}, - }, - {readConsistency: 'strong'} - ); - - expect((mockDynamoDbClient.getItem.mock.calls[0] as any)[0]) - .toMatchObject({ConsistentRead: true}); - } - ); - - it( - 'should apply the read consistency provided to the mapper constructor if not supplied to the operation', - async () => { - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - readConsistency: 'strong', - }); - await mapper.get({ - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: {}, - }); - - expect((mockDynamoDbClient.getItem.mock.calls[0] as any)[0]) - .toMatchObject({ConsistentRead: true}); - } - ); - - it('should serialize a provided projection expression', async () => { - await mapper.get( - { - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Date' - }, - }, - }, - {projection: ['fizz', 'pop']}, - ); - - expect((mockDynamoDbClient.getItem.mock.calls[0] as any)[0]) - .toMatchObject({ - ProjectionExpression: '#attr0, #attr1', - ExpressionAttributeNames: { - '#attr0': 'foo', - '#attr1': 'pop', - }, - }); - }); - - it( - 'should convert an empty (item not found) response into a rejected promise whose rejection 
includes the request sent to DynamoDB', - () => { - promiseFunc.mockImplementation(() => Promise.resolve({})); - - return expect(mapper.get( - { - fizz: 'buzz', - pop: new Date(60000), - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Date' - }, - }, - }, - { - readConsistency: 'strong', - projection: ['fizz', 'pop'], - } - )).rejects.toMatchObject(new ItemNotFoundException({ - TableName: 'foo', - Key: {foo: {S: 'buzz'}}, - ConsistentRead: true, - ProjectionExpression: '#attr0, #attr1', - ExpressionAttributeNames: { - '#attr0': 'foo', - '#attr1': 'pop', - }, - })); - } - ); - - it('should unmarshall the response using the table schema', async () => { - promiseFunc.mockImplementation(() => Promise.resolve({ - Item: { - foo: {S: 'buzz'}, - pop: {N: '60'}, - } - })); - - const result = await mapper.get({ - fizz: 'buzz', - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Date' - }, - }, - }); - - expect(result).toEqual({ - fizz: 'buzz', - pop: new Date(60000), - }); - }); - - it('should support the legacy call pattern', async () => { - await mapper.get({ - item: { - fizz: 'buzz', - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Number', - versionAttribute: true, - }, - }, - } - }); - }); - - it('should return instances of the correct class', async () => { - promiseFunc.mockImplementation(() => Promise.resolve({ - Item: { - foo: {S: 'buzz'}, - pop: {N: '60'}, - } - })); - - class Item { - fizz?: string; - - constructor(fizz?: string) { - this.fizz = fizz; - } - - get [DynamoDbTable]() { - return 'foo'; - } - - get [DynamoDbSchema]() { - return { - fizz: { - type: 'String', - attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Date' - }, - } - } - } - - const result = await mapper.get(new Item('buzz')); - - expect(result).toEqual({ - fizz: 'buzz', - pop: new Date(60000), - }); - expect(result).toBeInstanceOf(Item); - }); - }); - - describe('#parallelScan', () => { - const promiseFunc = jest.fn(); - const mockDynamoDbClient = { - config: {}, - scan: jest.fn() - }; - - beforeEach(() => { - promiseFunc.mockClear(); - promiseFunc.mockImplementation(() => Promise.resolve({Items: []})); - mockDynamoDbClient.scan.mockClear(); - mockDynamoDbClient.scan.mockImplementation(() => { - return {promise: promiseFunc}; - }); - }); - - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - }); - - class ScannableItem { - foo!: string; - - get [DynamoDbTable]() { return 'foo'; } - get [DynamoDbSchema]() { - return { - foo: { - type: 'String', - attributeName: 'fizz', - keyType: 'HASH', - }, - bar: { - type: 'Set', - memberType: 'Number' - }, - baz: { - type: 'Tuple', - members: [{type: 'Boolean'}, {type: 'Number'}] - }, - }; - } - - static fromKey(key: string) { - const target = new ScannableItem(); - target.foo = key; - return target; - } - } - - it( - 'should execute multiple requests in parallel when performing a scan with multiple segments', - async () => { - const segments = 2; - const keys = ['snap', 'crackle', 'pop', 'foo', 'bar', 'baz']; - let index = 0; - - // Ensure that the first promise won't resolve immediately. This - // would block progress on a sequential scan but should pose no - // problem for a parallel one. 
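
The #get tests above cover strong read consistency, projection expressions, and the fact that a missing item surfaces as a rejected `ItemNotFoundException` rather than an `undefined` result. A sketch of handling that (hypothetical `Profile` class and table name; assumes `ItemNotFoundException` is re-exported from the package root as it is elsewhere in this patch):

```typescript
import {DataMapper, DynamoDbSchema, DynamoDbTable, ItemNotFoundException} from '@aws/dynamodb-data-mapper';
import DynamoDB = require('aws-sdk/clients/dynamodb');

class Profile {
    constructor(public id?: string) {}

    get [DynamoDbTable]() { return 'profiles'; }    // hypothetical table name
    get [DynamoDbSchema]() {
        return {
            id: {type: 'String', keyType: 'HASH'},
            displayName: {type: 'String'},
            updatedAt: {type: 'Date'},
        };
    }
}

const mapper = new DataMapper({client: new DynamoDB()});

async function findProfile(id: string): Promise<Profile | undefined> {
    try {
        return await mapper.get(new Profile(id), {
            readConsistency: 'strong',
            projection: ['id', 'displayName'],
        });
    } catch (err) {
        // A missing item is reported as a rejection, not as `undefined`.
        if (err instanceof ItemNotFoundException) {
            return undefined;
        }
        throw err;
    }
}
```
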
- promiseFunc.mockImplementationOnce(() => new Promise(resolve => { - setTimeout( - resolve.bind(null, { - Items: [ - { - fizz: {S: 'quux'}, - bar: {NS: ['5', '12', '13']}, - baz: {L: [{BOOL: true}, {N: '101'}]}, - }, - ], - }), - 50, - ); - } - )); - - // Enqueue a number of responses that will resolve synchronously - for (const key of keys) { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: key}, - bar: {NS: [ - (++index).toString(10), - (++index).toString(10), - ]}, - baz: {L: [ - {BOOL: index % 2 === 0}, - {N: (++index).toString(10)} - ]}, - }, - ], - LastEvaluatedKey: {fizz: {S: key}}, - })); - } - - // Enqueue a final page for this segment - promiseFunc.mockImplementationOnce(() => Promise.resolve({})); - - const results = mapper.parallelScan(ScannableItem, segments); - - const result: Array = []; - for await (const res of results) { - result.push(res); - } - - expect(result).toEqual([ - { - foo: 'snap', - bar: new Set([1, 2]), - baz: [true, 3], - }, - { - foo: 'crackle', - bar: new Set([4, 5]), - baz: [false, 6], - }, - { - foo: 'pop', - bar: new Set([7, 8]), - baz: [true, 9], - }, - { - foo: 'foo', - bar: new Set([10, 11]), - baz: [false, 12], - }, - { - foo: 'bar', - bar: new Set([13, 14]), - baz: [true, 15], - }, - { - foo: 'baz', - bar: new Set([16, 17]), - baz: [false, 18], - }, - { - foo: 'quux', - bar: new Set([5, 12, 13]), - baz: [true, 101], - }, - ]); - - for (const scannedItem of result) { - expect(scannedItem).toBeInstanceOf(ScannableItem); - } - } - ); - - it('should return undefined for lastEvaluatedKey on the paginator', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2']}, - baz: {L: [ - {BOOL: true}, - {N: '3'} - ]}, - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({})); - promiseFunc.mockImplementationOnce(() => Promise.resolve({})); - - const paginator = mapper.parallelScan(ScannableItem, 2).pages(); - - for await (const _ of paginator) { - expect(paginator.lastEvaluatedKey).toBeUndefined(); - } - }); - - it('should return the current state for all segments', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2']}, - baz: {L: [ - {BOOL: true}, - {N: '3'} - ]}, - }, - { - fizz: {S: 'crackle'}, - bar: {NS: ['4', '5']}, - baz: {L: [ - {BOOL: true}, - {N: '6'} - ]}, - }, - ], - LastEvaluatedKey: {fizz: {S: 'pop'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({})); - promiseFunc.mockImplementationOnce(() => Promise.resolve({})); - - const iterator = mapper.parallelScan(ScannableItem, 2); - - for await (const _ of iterator) { - expect(iterator.pages().scanState) - .toMatchObject([ - { - initialized: true, - lastEvaluatedKey: ScannableItem.fromKey('pop') - }, - {initialized: false}, - ]); - break; - } - }); - - it('should resume from a provided scanState', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({})); - promiseFunc.mockImplementationOnce(() => Promise.resolve({})); - - const scanState: ParallelScanState = [ - {initialized: true}, - {initialized: true, lastEvaluatedKey: {foo: 'bar'}}, - {initialized: true, lastEvaluatedKey: {foo: 'baz'}}, - ]; - - for await (const _ of mapper.parallelScan(ScannableItem, 3, {scanState})) { - // pass - } - - expect(mockDynamoDbClient.scan.mock.calls).toEqual([ - [{ - TableName: 'foo', - ExclusiveStartKey: {fizz: {S: 
'bar'}}, - Segment: 1, - TotalSegments: 3 - }], - [{ - TableName: 'foo', - ExclusiveStartKey: {fizz: {S: 'baz'}}, - Segment: 2, - TotalSegments: 3 - }], - ]); - }); - - it('should support the legacy call pattern', async () => { - const iter = mapper.parallelScan({ - valueConstructor: ScannableItem, - segments: 4 - }); - await iter.next(); - }); - }); - - describe('#put', () => { - const promiseFunc = jest.fn(() => Promise.resolve({Item: {}} as PutItemOutput)); - const mockDynamoDbClient = { - config: {}, - putItem: jest.fn(() => ({promise: promiseFunc})), - }; - - beforeEach(() => { - promiseFunc.mockClear(); - mockDynamoDbClient.putItem.mockClear(); - }); - - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - }); - - it( - 'should throw if the item does not provide a schema per the data mapper protocol', - async () => { - await expect(mapper.put({ - [DynamoDbTable]: 'foo', - })).rejects.toMatchObject(new Error( - 'The provided item did not adhere to the DynamoDbDocument protocol. No object property was found at the `DynamoDbSchema` symbol' - )); - } - ); - - it( - 'should throw if the item does not provide a table name per the data mapper protocol', - async () => { - await expect(mapper.put({ - [DynamoDbSchema]: {}, - })).rejects.toMatchObject(new Error( - 'The provided item did not adhere to the DynamoDbTable protocol. No string property was found at the `DynamoDbTable` symbol' - )); - } - ); - - it( - 'should use the table name specified in the supplied table definition', - async () => { - const tableName = 'foo'; - await mapper.put({ - [DynamoDbTable]: tableName, - [DynamoDbSchema]: {}, - }); - - expect((mockDynamoDbClient.putItem.mock.calls[0] as any)[0]) - .toMatchObject({TableName: tableName}); - } - ); - - it( - 'should apply a table name prefix provided to the mapper constructor', - async () => { - const tableNamePrefix = 'INTEG_'; - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - tableNamePrefix, - }); - const tableName = 'foo'; - await mapper.put({ - [DynamoDbTable]: tableName, - [DynamoDbSchema]: {}, - }); - - expect((mockDynamoDbClient.putItem.mock.calls[0] as any)[0]) - .toMatchObject({TableName: tableNamePrefix + tableName}); - } - ); - - it( - 'should marshall the supplied item according to the schema', - async () => { - await mapper.put({ - fizz: 'buzz', - pop: new Date(60000), - snap: false, - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: {type: 'String'}, - pop: {type: 'Date'}, - snap: { - type: 'Boolean', - attributeName: 'crackle', - } - }, - }); - - expect((mockDynamoDbClient.putItem.mock.calls[0] as any)[0]) - .toMatchObject({ - Item: { - fizz: {S: 'buzz'}, - pop: {N: '60'}, - crackle: {BOOL: false}, - } - }); - } - ); - - it( - 'should include a condition expression and increment the version number when the schema contains a version attribute', - async () => { - await mapper.put({ - fizz: 'buzz', - pop: 21, - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Number', - versionAttribute: true, - }, - }, - }); - - expect((mockDynamoDbClient.putItem.mock.calls[0] as any)[0]) - .toMatchObject({ - Item: { - foo: {S: 'buzz'}, - pop: {N: '22'}, - }, - ConditionExpression: '#attr0 = :val1', - ExpressionAttributeNames: {'#attr0': 'pop'}, - ExpressionAttributeValues: {':val1': {N: '21'}}, - }); - } - ); - - it( - 'should include a condition expression requiring that no versioned item be present when the schema contains a version 
attribute but the value is undefined', - async () => { - await mapper.put({ - fizz: 'buzz', - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Number', - versionAttribute: true, - }, - }, - }); - - expect((mockDynamoDbClient.putItem.mock.calls[0] as any)[0]) - .toEqual({ - Item: { - foo: {S: 'buzz'}, - pop: {N: '0'}, - }, - ConditionExpression: 'attribute_not_exists(#attr0)', - ExpressionAttributeNames: {'#attr0': 'pop'}, - TableName: 'foo', - }); - } - ); - - it( - 'should not include a condition expression when the skipVersionCheck input parameter is true', - async () => { - await mapper.put( - { - fizz: 'buzz', - pop: 21, - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Number', - versionAttribute: true, - }, - }, - }, - {skipVersionCheck: true}, - ); - - expect((mockDynamoDbClient.putItem.mock.calls[0] as any)[0]) - .not.toHaveProperty('ConditionExpression'); - } - ); - - it( - `should not include a condition expression when the mapper's default skipVersionCheck input parameter is true`, - async () => { - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - skipVersionCheck: true - }); - await mapper.put({ - fizz: 'buzz', - pop: 21, - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Number', - versionAttribute: true, - }, - }, - }); - - expect((mockDynamoDbClient.putItem.mock.calls[0] as any)[0]) - .not.toHaveProperty('ConditionExpression'); - } - ); - - it( - 'should combine the version condition with any other condition expression', - async () => { - await mapper.put( - { - fizz: 'buzz', - pop: 21, - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Number', - versionAttribute: true, - }, - quux: {type: 'Date'}, - }, - }, - { - condition: { - type: 'LessThan', - subject: 'quux', - object: 600000 - } - } - ); - - expect((mockDynamoDbClient.putItem.mock.calls[0] as any)[0]) - .toMatchObject({ - ConditionExpression: '(#attr0 < :val1) AND (#attr2 = :val3)', - ExpressionAttributeNames: { - '#attr0': 'quux', - '#attr2': 'pop', - }, - ExpressionAttributeValues: { - ':val1': {N: '600000'}, - ':val3': {N: '21'} - }, - }); - } - ); - - it('should return the unmarshalled input', async () => { - promiseFunc.mockImplementation(() => Promise.resolve({} as PutItemOutput)); - - const result = await mapper.put({ - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - foo: { - type: 'String', - attributeName: 'fizz', - defaultProvider: () => 'keykey', - keyType: 'HASH', - }, - bar: { - type: 'Number', - versionAttribute: true - }, - }, - }); - - expect(result).toMatchObject({ - foo: 'keykey', - bar: 0 - }) - }); - - it('should support the legacy call pattern', async () => { - await mapper.put({ - item: { - fizz: 'buzz', - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Number', - versionAttribute: true, - }, - }, - } - }); - }); - - it('should return an instance of the provided class', async () => { - promiseFunc.mockImplementation(() => Promise.resolve({})); - - class Item { - get [DynamoDbTable]() { - return 'foo'; - } - - get [DynamoDbSchema] () { - return { - foo: { - type: 'String', - attributeName: 'fizz', - defaultProvider: () => 
'keykey', - keyType: 'HASH', - }, - bar: { - type: 'Number', - versionAttribute: true - }, - }; - } - } - const result = await mapper.put(new Item); - - expect(result).toMatchObject({ - foo: 'keykey', - bar: 0 - }); - - expect(result).toBeInstanceOf(Item); - }); - }); - - describe('#query', () => { - const promiseFunc = jest.fn(); - const mockDynamoDbClient = { - config: {}, - query: jest.fn() - }; - - beforeEach(() => { - promiseFunc.mockClear(); - promiseFunc.mockImplementation(() => Promise.resolve({Attributes: {}})); - mockDynamoDbClient.query.mockClear(); - mockDynamoDbClient.query.mockImplementation(() => ({promise: promiseFunc})); - }); - - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - }); - - class QueryableItem { - snap!: string; - fizz?: Array; - - get [DynamoDbTable]() { return 'foo'; } - get [DynamoDbSchema]() { - return { - snap: { - type: 'String', - keyType: 'HASH', - }, - fizz: { - type: 'List', - memberType: {type: 'String'}, - attributeName: 'fizzes', - }, - }; - } - - static fromKey(key: string) { - const target = new QueryableItem(); - target.snap = key; - return target; - } - } - - it( - 'should throw if the item does not provide a schema per the data mapper protocol', - () => { - expect(() => mapper.query( - class { - get [DynamoDbTable]() { return 'foo'; } - }, - {foo: 'buzz'} - )).toThrow( - 'The provided item did not adhere to the DynamoDbDocument protocol. No object property was found at the `DynamoDbSchema` symbol' - ); - } - ); - - it( - 'should throw if the item does not provide a table name per the data mapper protocol', - () => { - expect(() => mapper.query( - class { - get [DynamoDbSchema]() { return {}; } - }, - {foo: 'buzz'} - )).toThrow( - 'The provided item did not adhere to the DynamoDbTable protocol. 
No string property was found at the `DynamoDbTable` symbol' - ); - } - ); - - it( - 'should paginate over results and return a promise for each item', - async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'pop'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({})); - - class QueryableItem { - get [DynamoDbTable]() { return 'foo'; } - get [DynamoDbSchema]() { - return { - foo: { - type: 'String', - attributeName: 'fizz', - keyType: 'HASH', - }, - bar: { - type: 'Set', - memberType: 'Number' - }, - baz: { - type: 'Tuple', - members: [{type: 'Boolean'}, {type: 'Number'}] - }, - }; - } - } - - const results: any[] = []; - for await (const res of mapper.query(QueryableItem, {foo: 'buzz'})) { - results.push(res); - } - - expect(results).toEqual([ - { - foo: 'snap', - bar: new Set([1, 2, 3]), - baz: [true, 4], - }, - { - foo: 'crackle', - bar: new Set([5, 6, 7]), - baz: [false, 8], - }, - { - foo: 'pop', - bar: new Set([9, 12, 30]), - baz: [true, 24], - }, - ]); - - for (const queriedItem of results) { - expect(queriedItem).toBeInstanceOf(QueryableItem); - } - } - ); - - it( - 'should request a consistent read if the readConsistency is StronglyConsistent', - async () => { - const results = mapper.query( - QueryableItem, - {foo: 'bar'}, - {readConsistency: 'strong'} - ); - - await results.next(); - - expect(mockDynamoDbClient.query.mock.calls[0][0]) - .toMatchObject({ConsistentRead: true}); - } - ); - - it('should allow a condition expression as the keyCondition', async () => { - const results = mapper.query( - class { - get [DynamoDbTable]() { return 'foo'; } - get [DynamoDbSchema]() { - return { - snap: { - type: 'String', - keyType: 'HASH', - }, - fizz: { - type: 'String', - keyType: 'RANGE', - }, - }; - } - }, - { - type: 'And', - conditions: [ - { - type: 'Equals', - subject: 'snap', - object: 'crackle', - }, - new FunctionExpression( - 'begins_with', - new AttributePath('fizz'), - 'buz' - ) - ] - }, - ); - - await results.next(); - - expect(mockDynamoDbClient.query.mock.calls[0][0]) - .toMatchObject({ - KeyConditionExpression: '(#attr0 = :val1) AND (begins_with(#attr2, :val3))', - ExpressionAttributeNames: { - '#attr0': 'snap', - '#attr2': 'fizz', - }, - ExpressionAttributeValues: { - ':val1': {S: 'crackle'}, - ':val3': {S: 'buz'} - }, - }); - }); - - it( - 'should allow a condition expression predicate in the keyCondition', - async () => { - const results = mapper.query( - QueryableItem, - { - snap: 'crackle', - pop: between(10, 20), - }, - ); - - await results.next(); - - expect(mockDynamoDbClient.query.mock.calls[0][0]) - .toMatchObject({ - KeyConditionExpression: '(#attr0 = :val1) AND (#attr2 BETWEEN :val3 AND :val4)', - ExpressionAttributeNames: { - '#attr0': 'snap', - '#attr2': 'pop', - }, - ExpressionAttributeValues: { - ':val1': {S: 'crackle'}, - ':val3': {N: '10'}, - ':val4': {N: '20'} - }, - }); - } - ); - - it('should allow a filter expression', async 
() => { - const results = mapper.query( - QueryableItem, - {snap: 'crackle'}, - { - filter: { - subject: 'fizz[1]', - ...inList('buzz', 'pop'), - }, - } - ); - - await results.next(); - - expect(mockDynamoDbClient.query.mock.calls[0][0]) - .toMatchObject({ - FilterExpression: '#attr2[1] IN (:val3, :val4)', - ExpressionAttributeNames: { - '#attr0': 'snap', - '#attr2': 'fizzes', - }, - ExpressionAttributeValues: { - ':val1': {S: 'crackle'}, - ':val3': {S: 'buzz'}, - ':val4': {S: 'pop'}, - }, - }); - }); - - it('should allow a projection expression', async () => { - const results = mapper.query( - QueryableItem, - {snap: 'crackle'}, - {projection: ['snap', 'fizz[1]']} - ); - - await results.next(); - - expect(mockDynamoDbClient.query.mock.calls[0][0]) - .toMatchObject({ - ProjectionExpression: '#attr0, #attr2[1]', - ExpressionAttributeNames: { - '#attr0': 'snap', - '#attr2': 'fizzes', - }, - ExpressionAttributeValues: { - ':val1': {S: 'crackle'}, - }, - }); - }); - - it('should allow a start key', async () => { - const results = mapper.query( - class { - get [DynamoDbTable]() { return 'foo'; } - get [DynamoDbSchema]() { - return { - snap: { - type: 'String', - keyType: 'HASH', - }, - fizz: { - type: 'Number', - keyType: 'RANGE' - }, - }; - } - }, - {snap: 'crackle'}, - {startKey: {fizz: 100}} - ); - - await results.next(); - - expect(mockDynamoDbClient.query.mock.calls[0][0]) - .toMatchObject({ - ExclusiveStartKey: { - fizz: {N: '100'}, - } - }); - }); - - it('supports the legacy call pattern', async () => { - const iter = mapper.query({ - valueConstructor: QueryableItem, - keyCondition: {snap: 'crackle'}, - indexName: 'baz-index', - pageSize: 1, - scanIndexForward: true - }); - - await iter.next(); - - expect(mockDynamoDbClient.query.mock.calls[0][0]) - .toEqual({ - TableName: 'foo', - KeyConditionExpression: '#attr0 = :val1', - ExpressionAttributeNames: { - '#attr0': 'snap', - }, - ExpressionAttributeValues: { - ':val1': {S: 'crackle'} - }, - IndexName: 'baz-index', - Limit: 1, - ScanIndexForward: true - }); - }); - - it('should track usage metadata', async () => { - const ScannedCount = 3; - const ConsumedCapacity = { - TableName: 'foo', - CapacityUnits: 4 - }; - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { snap: {S: 'foo'} }, - { snap: {S: 'bar'} }, - ], - LastEvaluatedKey: {snap: {S: 'bar'}}, - Count: 2, - ScannedCount, - ConsumedCapacity, - })); - - const iterator = mapper.query(QueryableItem, {snap: 'crackle'}); - await iterator.next(); - - // only items actually yielded should be counted in `count` - expect(iterator.count).toBe(1); - // `consumedCapacity` and `scannedCount` should relay information - // from the API response - expect(iterator.scannedCount).toBe(ScannedCount); - expect(iterator.consumedCapacity).toEqual(ConsumedCapacity); - }); - - it('should support detaching the paginator', async () => { - const ScannedCount = 3; - const ConsumedCapacity = { - TableName: 'foo', - CapacityUnits: 4 - }; - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { snap: {S: 'foo'} }, - { snap: {S: 'bar'} }, - ], - Count: 2, - ScannedCount, - ConsumedCapacity, - })); - - const paginator = mapper.query(QueryableItem, {snap: 'crackle'}).pages(); - for await (const page of paginator) { - expect(page).toEqual([ - QueryableItem.fromKey('foo'), - QueryableItem.fromKey('bar'), - ]); - } - - expect(paginator.count).toBe(2); - expect(paginator.scannedCount).toBe(ScannedCount); - expect(paginator.consumedCapacity).toEqual(ConsumedCapacity); - }); - - 
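            // The two tests above exercise the usage-metadata surface of the query
            // iterator and its detached paginator. A minimal consumption sketch,
            // assuming the same annotated QueryableItem class used in this suite:
            //
            //     const paginator = mapper.query(QueryableItem, {snap: 'crackle'}).pages();
            //     for await (const page of paginator) {
            //         // each page is an array of unmarshalled QueryableItem instances
            //     }
            //     // once iteration completes, usage metadata is exposed on the paginator
            //     console.log(paginator.count, paginator.scannedCount, paginator.consumedCapacity);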
it('should cease iteration once the limit has been reached', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { snap: {S: 'snap'} }, - { snap: {S: 'crackle'} }, - { snap: {S: 'pop'} }, - ], - LastEvaluatedKey: {snap: {S: 'pop'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { snap: {S: 'fizz'} }, - ], - LastEvaluatedKey: {snap: {S: 'fizz'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { snap: {S: 'buzz'} }, - ], - LastEvaluatedKey: {snap: {S: 'buzz'}}, - })); - - const results = mapper.query(QueryableItem, {snap: 'crackle'}, { limit: 5 }); - - for await (const _ of results) { - // pass - } - - expect(results.pages().lastEvaluatedKey) - .toEqual(QueryableItem.fromKey('buzz')); - - expect(mockDynamoDbClient.query.mock.calls).toEqual([ - [{ - TableName: 'foo', - Limit: 5, - KeyConditionExpression: '#attr0 = :val1', - ExpressionAttributeNames: { '#attr0': 'snap' }, - ExpressionAttributeValues: { ':val1': {S: 'crackle'} }, - }], - [{ - TableName: 'foo', - Limit: 2, - KeyConditionExpression: '#attr0 = :val1', - ExpressionAttributeNames: { '#attr0': 'snap' }, - ExpressionAttributeValues: { ':val1': {S: 'crackle'} }, - ExclusiveStartKey: { - snap: {S: 'pop'} - } - }], - [{ - TableName: 'foo', - Limit: 1, - KeyConditionExpression: '#attr0 = :val1', - ExpressionAttributeNames: { '#attr0': 'snap' }, - ExpressionAttributeValues: { ':val1': {S: 'crackle'} }, - ExclusiveStartKey: { - snap: {S: 'fizz'} - } - }] - ]); - }); - - describe('startKey serialization', () => { - class MyItem { - snap?: string; - crackle?: number; - pop?: Date; - - constructor(key?: string) { - this.snap = key; - } - - get [DynamoDbTable]() { return 'table'; } - get [DynamoDbSchema]() { - return { - snap: { - type: 'String', - keyType: 'HASH', - }, - crackle: { - type: 'Number', - keyType: 'RANGE', - defaultProvider: () => 0, - indexKeyConfigurations: { - myIndex: { keyType: 'RANGE' } - } - }, - pop: { - type: 'Date', - defaultProvider: () => new Date, - indexKeyConfigurations: { - myIndex: { keyType: 'HASH' } - } - }, - }; - } - } - - it('should not inject default values into the startKey', async () => { - const iter = mapper.query( - MyItem, - { snap: 'key' }, - { startKey: new MyItem('key') } - ); - await iter.next(); - - expect(mockDynamoDbClient.query.mock.calls[0][0].ExclusiveStartKey) - .toEqual({ - snap: {S: 'key'}, - }); - }); - }); - }); - - describe('#scan', () => { - const promiseFunc = jest.fn(); - const mockDynamoDbClient = { - config: {}, - scan: jest.fn() - }; - - beforeEach(() => { - promiseFunc.mockClear(); - promiseFunc.mockImplementation(() => Promise.resolve({Items: []})); - mockDynamoDbClient.scan.mockClear(); - mockDynamoDbClient.scan.mockImplementation(() => { - return {promise: promiseFunc}; - }); - }); - - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - }); - - class ScannableItem { - snap!: string; - fizz?: Array; - - get [DynamoDbTable]() { return 'foo'; } - get [DynamoDbSchema]() { - return { - snap: { - type: 'String', - keyType: 'HASH', - }, - fizz: { - type: 'List', - memberType: {type: 'String'}, - attributeName: 'fizzes', - }, - }; - } - - static fromKey(key: string) { - const target = new ScannableItem; - target.snap = key; - return target; - } - } - - it( - 'should throw if the item does not provide a schema per the data mapper protocol', - () => { - expect(() => mapper.scan( - class { - get [DynamoDbTable]() { return 'foo'; } - }, - )).toThrow( - 'The provided 
item did not adhere to the DynamoDbDocument protocol. No object property was found at the `DynamoDbSchema` symbol' - ); - } - ); - - it( - 'should throw if the item does not provide a table name per the data mapper protocol', - () => { - expect(() => mapper.scan(class { - get [DynamoDbSchema]() { return {}; } - })).toThrow( - 'The provided item did not adhere to the DynamoDbTable protocol. No string property was found at the `DynamoDbTable` symbol' - ); - } - ); - - it( - 'should paginate over results and return a promise for each item', - async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'pop'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({})); - - class ScannableItem { - get [DynamoDbTable]() { return 'foo'; } - get [DynamoDbSchema]() { - return { - foo: { - type: 'String', - attributeName: 'fizz', - keyType: 'HASH', - }, - bar: { - type: 'Set', - memberType: 'Number' - }, - baz: { - type: 'Tuple', - members: [{type: 'Boolean'}, {type: 'Number'}] - }, - }; - } - } - - const results = mapper.scan(ScannableItem); - - const result: any[] = []; - for await (const res of results) { - result.push(res); - } - - expect(result).toEqual([ - { - foo: 'snap', - bar: new Set([1, 2, 3]), - baz: [true, 4], - }, - { - foo: 'crackle', - bar: new Set([5, 6, 7]), - baz: [false, 8], - }, - { - foo: 'pop', - bar: new Set([9, 12, 30]), - baz: [true, 24], - }, - ]); - - for (const item of result) { - expect(item).toBeInstanceOf(ScannableItem); - } - } - ); - - it( - 'should request a consistent read if the readConsistency is StronglyConsistent', - async () => { - const results = mapper.scan( - ScannableItem, - {readConsistency: 'strong'} - ); - - await results.next(); - - expect(mockDynamoDbClient.scan.mock.calls[0][0]) - .toMatchObject({ConsistentRead: true}); - } - ); - - it('should allow a filter expression', async () => { - const results = mapper.scan( - ScannableItem, - { - filter: { - type: 'Not', - condition: { - subject: 'fizz[1]', - ...equals('buzz'), - } - }, - } - ); - - await results.next(); - - expect(mockDynamoDbClient.scan.mock.calls[0][0]) - .toMatchObject({ - FilterExpression: 'NOT (#attr0[1] = :val1)', - ExpressionAttributeNames: { - '#attr0': 'fizzes', - }, - ExpressionAttributeValues: { - ':val1': {S: 'buzz'}, - }, - }); - }); - - it('should allow a projection expression', async () => { - const results = mapper.scan( - ScannableItem, - {projection: ['snap', 'fizz[1]']} - ); - - await results.next(); - - expect(mockDynamoDbClient.scan.mock.calls[0][0]) - .toMatchObject({ - ProjectionExpression: '#attr0, #attr1[1]', - ExpressionAttributeNames: { - '#attr0': 'snap', - '#attr1': 'fizzes', - }, - }); - }); - - it('should allow a start key', async () => { - const results = mapper.scan( - class { - get [DynamoDbTable]() { return 'foo'; } - get [DynamoDbSchema]() { - return { - snap: { - type: 'String', - keyType: 'HASH', - }, - fizz: { - type: 'Number', - keyType: 
'RANGE' - }, - }; - } - }, - {startKey: {fizz: 100}} - ); - - await results.next(); - - expect(mockDynamoDbClient.scan.mock.calls[0][0]) - .toMatchObject({ - ExclusiveStartKey: { - fizz: {N: '100'}, - } - }); - }); - - it('should allow the page size to be set', async () => { - const results = mapper.scan(ScannableItem, {pageSize: 20}); - - await results.next(); - - expect(mockDynamoDbClient.scan.mock.calls[0][0]) - .toMatchObject({Limit: 20}); - }); - - it('should not use a page size greater than the "limit" parameter', async () => { - const results = mapper.scan(ScannableItem, { - limit: 20, - pageSize: 200 - }); - - await results.next(); - - expect(mockDynamoDbClient.scan.mock.calls[0][0]) - .toMatchObject({Limit: 20}); - }); - - it('should not use a page size greater than the "pageSize" parameter', async () => { - const results = mapper.scan(ScannableItem, { - pageSize: 20, - limit: 200, - }); - - await results.next(); - - expect(mockDynamoDbClient.scan.mock.calls[0][0]) - .toMatchObject({Limit: 20}); - }); - - it('should cease iteration once the limit has been reached', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { snap: {S: 'snap'} }, - { snap: {S: 'crackle'} }, - { snap: {S: 'pop'} }, - ], - LastEvaluatedKey: {snap: {S: 'pop'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { snap: {S: 'fizz'} }, - ], - LastEvaluatedKey: {snap: {S: 'fizz'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { snap: {S: 'buzz'} }, - ], - LastEvaluatedKey: {snap: {S: 'buzz'}}, - })); - - const results = mapper.scan(ScannableItem, { limit: 5 }); - - for await (const _ of results) { - // pass - } - - expect(results.pages().lastEvaluatedKey) - .toEqual(ScannableItem.fromKey('buzz')); - - expect(mockDynamoDbClient.scan.mock.calls).toEqual([ - [{ - TableName: 'foo', - Limit: 5 - }], - [{ - TableName: 'foo', - Limit: 2, - ExclusiveStartKey: { - snap: {S: 'pop'} - } - }], - [{ - TableName: 'foo', - Limit: 1, - ExclusiveStartKey: { - snap: {S: 'fizz'} - } - }] - ]); - }); - - it('should support the legacy call pattern', async () => { - const iter = mapper.scan({ - valueConstructor: ScannableItem, - indexName: 'baz-index' - }); - - await iter.next(); - - expect(mockDynamoDbClient.scan.mock.calls[0][0]).toEqual({ - TableName: 'foo', - IndexName: 'baz-index' - }); - }); - - describe('startKey serialization', () => { - class MyItem { - snap?: string; - crackle?: number; - pop?: Date; - - constructor(key?: string) { - this.snap = key; - } - - get [DynamoDbTable]() { return 'table'; } - get [DynamoDbSchema]() { - return { - snap: { - type: 'String', - keyType: 'HASH', - }, - crackle: { - type: 'Number', - keyType: 'RANGE', - defaultProvider: () => 0, - indexKeyConfigurations: { - myIndex: { keyType: 'RANGE' } - } - }, - pop: { - type: 'Date', - defaultProvider: () => new Date, - indexKeyConfigurations: { - myIndex: { keyType: 'HASH' } - } - }, - }; - } - } - - it('should not inject default properties into the startKey', async () => { - const iter = mapper.scan( - MyItem, - { startKey: new MyItem('key') } - ); - await iter.next(); - - expect(mockDynamoDbClient.scan.mock.calls[0][0].ExclusiveStartKey) - .toEqual({ - snap: {S: 'key'}, - }); - }); - }); - }); - - describe('updating items', () => { - const tableName = 'foo'; - - class EmptyItem { - get [DynamoDbTable]() { - return tableName; - } - - get [DynamoDbSchema]() { - return {}; - } - } - - class ComplexItem extends EmptyItem { - foo!: string; - bar?: 
[number, BinaryValue]; - quux?: { - snap: string; - crackle: Date; - pop: {[key: string]: any}; - }; - - get [DynamoDbSchema]() { - return { - foo: { - type: 'String', - keyType: 'HASH', - attributeName: 'fizz' - }, - bar: { - type: 'Tuple', - members: [ - {type: 'Number'}, - {type: 'Binary'}, - ], - attributeName: 'buzz', - }, - quux: { - type: 'Document', - members: { - snap: { type: 'String' }, - crackle: { type: 'Date' }, - pop: { type: 'Hash' }, - } as Schema, - }, - }; - } - } - - const promiseFunc = jest.fn(); - const mockDynamoDbClient = { - config: {}, - updateItem: jest.fn(), - }; - - beforeEach(() => { - promiseFunc.mockClear(); - promiseFunc.mockImplementation(() => Promise.resolve({Attributes: {}})); - mockDynamoDbClient.updateItem.mockClear(); - mockDynamoDbClient.updateItem.mockImplementation(() => ({promise: promiseFunc})); - }); - - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - }); - - describe('#update', () => { - it( - 'should throw if the item does not provide a schema per the data mapper protocol', - async () => { - await expect(mapper.update({ - [DynamoDbTable]: 'foo', - })).rejects.toMatchObject(new Error( - 'The provided item did not adhere to the DynamoDbDocument protocol. No object property was found at the `DynamoDbSchema` symbol' - )); - } - ); - - it( - 'should throw if the item does not provide a table name per the data mapper protocol', - async () => { - await expect(mapper.update({ - [DynamoDbSchema]: {}, - })).rejects.toMatchObject(new Error( - 'The provided item did not adhere to the DynamoDbTable protocol. No string property was found at the `DynamoDbTable` symbol' - )); - } - ); - - it( - 'should use the table name specified in the supplied table definition', - async () => { - const tableName = 'foo'; - await mapper.update({item: new EmptyItem()}); - - expect(mockDynamoDbClient.updateItem.mock.calls[0][0]) - .toMatchObject({TableName: tableName}); - } - ); - - it( - 'should apply a table name prefix provided to the mapper constructor', - async () => { - const tableNamePrefix = 'INTEG_'; - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - tableNamePrefix, - }); - const tableName = 'foo'; - await mapper.update(new EmptyItem()); - - expect(mockDynamoDbClient.updateItem.mock.calls[0][0]) - .toMatchObject({TableName: tableNamePrefix + tableName}); - } - ); - - it('should marshall updates into an UpdateItemInput', async () => { - const item = new ComplexItem(); - item.foo = 'key'; - item.bar = [1, Uint8Array.from([0xde, 0xad, 0xbe, 0xef])]; - - await mapper.update(item); - - expect(mockDynamoDbClient.updateItem.mock.calls[0][0]) - .toMatchObject({ - TableName: tableName, - Key: { - fizz: {S: 'key'} - }, - ExpressionAttributeNames: { - '#attr0': 'buzz', - '#attr2': 'quux', - }, - ExpressionAttributeValues: { - ':val1': { - L: [ - {N: '1'}, - {B: Uint8Array.from([0xde, 0xad, 0xbe, 0xef])} - ], - } - }, - UpdateExpression: 'SET #attr0 = :val1 REMOVE #attr2', - }); - }); - - it( - 'should not remove missing keys when onMissing is "SKIP"', - async () => { - const item = new ComplexItem(); - item.foo = 'key'; - item.bar = [1, Uint8Array.from([0xde, 0xad, 0xbe, 0xef])]; - await mapper.update(item, {onMissing: 'skip'}); - - expect(mockDynamoDbClient.updateItem.mock.calls[0][0]) - .toMatchObject({ - TableName: tableName, - Key: { - fizz: {S: 'key'} - }, - ExpressionAttributeNames: { - '#attr0': 'buzz', - }, - ExpressionAttributeValues: { - ':val1': { - L: [ - {N: '1'}, - {B: Uint8Array.from([0xde, 0xad, 0xbe, 0xef])} - ], - 
} - }, - UpdateExpression: 'SET #attr0 = :val1', - }); - } - ); - - it('should unmarshall any returned attributes', async () => { - promiseFunc.mockImplementation(() => Promise.resolve({Attributes: { - fizz: {S: 'buzz'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }})); - - const result = await mapper.update({ - foo: 'buzz', - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - foo: { - type: 'String', - attributeName: 'fizz', - keyType: 'HASH', - }, - bar: { - type: 'Set', - memberType: 'Number' - }, - baz: { - type: 'Tuple', - members: [{type: 'Boolean'}, {type: 'Number'}] - }, - }, - }); - - expect(result).toEqual({ - foo: 'buzz', - bar: new Set([1, 2, 3]), - baz: [true, 4], - }) - }); - - it('should throw an error if no attributes were returned', async () => { - promiseFunc.mockImplementation(() => Promise.resolve({})); - - return expect(mapper.update({ - foo: 'buzz', - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - foo: { - type: 'String', - attributeName: 'fizz', - keyType: 'HASH', - }, - bar: { - type: 'Set', - memberType: 'Number' - }, - baz: { - type: 'Tuple', - members: [{type: 'Boolean'}, {type: 'Number'}] - }, - }, - })).rejects.toMatchObject(new Error( - 'Update operation completed successfully, but the updated value was not returned' - )); - }); - - describe('version attributes', () => { - class VersionedItem { - foo!: string; - bar?: [number, Uint8Array]; - baz?: number; - - get [DynamoDbTable]() { - return 'table'; - } - - get [DynamoDbSchema]() { - return { - foo: { - type: 'String', - keyType: 'HASH', - attributeName: 'fizz' - }, - bar: { - type: 'Tuple', - members: [ - {type: 'Number'}, - {type: 'Binary'}, - ], - attributeName: 'buzz', - }, - baz: { - type: 'Number', - versionAttribute: true, - }, - }; - } - } - - it( - 'should inject a conditional expression requiring the absence of the versioning property and set its value to 0 when an object without a value for it is marshalled', - async () => { - const item = new VersionedItem(); - item.foo = 'key'; - item.bar = [1, Uint8Array.from([0xde, 0xad, 0xbe, 0xef])]; - - await mapper.update(item); - - expect(mockDynamoDbClient.updateItem.mock.calls[0][0]) - .toMatchObject({ - TableName: 'table', - Key: { - fizz: {S: 'key'} - }, - ConditionExpression: 'attribute_not_exists(#attr0)', - ExpressionAttributeNames: { - '#attr0': 'baz', - '#attr1': 'buzz', - }, - ExpressionAttributeValues: { - ':val2': { - L: [ - {N: '1'}, - {B: Uint8Array.from([0xde, 0xad, 0xbe, 0xef])} - ], - }, - ':val3': {N: '0'}, - }, - UpdateExpression: 'SET #attr1 = :val2, #attr0 = :val3', - }); - } - ); - - it( - 'should inject a conditional expression requiring the known value of the versioning property and set its value to the previous value + 1 when an object with a value for it is marshalled', - async () => { - const item = new VersionedItem(); - item.foo = 'key'; - item.bar = [1, Uint8Array.from([0xde, 0xad, 0xbe, 0xef])]; - item.baz = 10; - - await mapper.update(item); - - expect(mockDynamoDbClient.updateItem.mock.calls[0][0]) - .toMatchObject({ - TableName: 'table', - Key: { - fizz: {S: 'key'} - }, - ConditionExpression: '#attr0 = :val1', - ExpressionAttributeNames: { - '#attr0': 'baz', - '#attr2': 'buzz', - }, - ExpressionAttributeValues: { - ':val1': {N: '10'}, - ':val3': { - L: [ - {N: '1'}, - {B: Uint8Array.from([0xde, 0xad, 0xbe, 0xef])} - ], - }, - ':val4': {N: '1'}, - }, - UpdateExpression: 'SET #attr2 = :val3, #attr0 = #attr0 + :val4', - }); - } - ); - - it( - 'should not include a condition expression when the 
skipVersionCheck input parameter is true', - async () => { - const item = new VersionedItem(); - item.foo = 'key'; - item.bar = [1, Uint8Array.from([0xde, 0xad, 0xbe, 0xef])]; - item.baz = 10; - - await mapper.update(item, {skipVersionCheck: true}); - - expect(mockDynamoDbClient.updateItem.mock.calls[0][0]) - .not.toHaveProperty('ConditionExpression'); - } - ); - - it( - `should not include a condition expression when the mapper's default skipVersionCheck input parameter is true`, - async () => { - const mapper = new DataMapper({ - client: mockDynamoDbClient as any, - skipVersionCheck: true - }); - - const item = new VersionedItem(); - item.foo = 'key'; - item.bar = [1, Uint8Array.from([0xde, 0xad, 0xbe, 0xef])]; - item.baz = 10; - - await mapper.update(item); - - expect(mockDynamoDbClient.updateItem.mock.calls[0][0]) - .not.toHaveProperty('ConditionExpression'); - } - ); - - it( - 'should combine the version condition with any other condition expression', - async () => { - const item = new VersionedItem(); - item.foo = 'key'; - item.bar = [1, Uint8Array.from([0xde, 0xad, 0xbe, 0xef])]; - item.baz = 10; - - await mapper.update(item, { - condition: { - type: 'LessThan', - subject: 'bar[0]', - object: 600000 - } - }); - - expect(mockDynamoDbClient.updateItem.mock.calls[0][0]) - .toMatchObject({ - ConditionExpression: '(#attr0[0] < :val1) AND (#attr2 = :val3)', - ExpressionAttributeNames: { - '#attr0': 'buzz', - '#attr2': 'baz', - }, - ExpressionAttributeValues: { - ':val1': {N: '600000'}, - ':val3': {N: '10'}, - ':val4': { - L: [ - {N: '1'}, - {B: Uint8Array.from([0xde, 0xad, 0xbe, 0xef])}, - ], - }, - }, - }); - } - ); - }); - - it('should support the legacy call pattern', async () => { - await mapper.update({ - item: { - fizz: 'buzz', - [DynamoDbTable]: 'foo', - [DynamoDbSchema]: { - fizz: { - type: 'String', - attributeName: 'foo', - keyType: 'HASH', - }, - pop: { - type: 'Number', - versionAttribute: true, - }, - }, - }, - }); - }); - - it('should return an instance of the provided class', async () => { - const item = new ComplexItem(); - item.foo = 'key'; - item.bar = [1, Uint8Array.from([0xde, 0xad, 0xbe, 0xef])]; - - const result = await mapper.update(item); - - expect(result).toBeInstanceOf(ComplexItem); - }); - }); - - describe('#executeUpdateExpression', () => { - it( - 'should use the provided schema to execute the provided expression', - async () => { - const expression = new UpdateExpression; - expression.set(new AttributePath('bar[1]'), Uint8Array.from([0xde, 0xad, 0xbe, 0xef])); - - const updated = await mapper.executeUpdateExpression(expression, {foo: 'key'}, ComplexItem); - - expect(updated).toBeInstanceOf(ComplexItem); - expect(mockDynamoDbClient.updateItem.mock.calls[0][0]) - .toMatchObject({ - TableName: tableName, - Key: { - fizz: {S: 'key'} - }, - ExpressionAttributeNames: { - '#attr0': 'buzz', - }, - ExpressionAttributeValues: { - ':val1': {B: Uint8Array.from([0xde, 0xad, 0xbe, 0xef])}, - }, - UpdateExpression: 'SET #attr0[1] = :val1', - }); - } - ); - }); - }); -}); diff --git a/packages/dynamodb-data-mapper/src/DataMapper.ts b/packages/dynamodb-data-mapper/src/DataMapper.ts deleted file mode 100644 index 0c923dae..00000000 --- a/packages/dynamodb-data-mapper/src/DataMapper.ts +++ /dev/null @@ -1,1385 +0,0 @@ -import { BatchState } from './BatchState'; -import { - ReadConsistency, - StringToAnyObjectMap, - SyncOrAsyncIterable, - VERSION, - WriteType, -} from './constants'; -import { ItemNotFoundException } from './ItemNotFoundException'; -import { - BatchGetOptions, - 
BatchGetTableOptions, - CreateTableOptions, - DataMapperConfiguration, - DeleteOptions, - DeleteParameters, - ExecuteUpdateExpressionOptions, - GetOptions, - GetParameters, - ParallelScanOptions, - ParallelScanParameters, - ParallelScanWorkerOptions, - ParallelScanWorkerParameters, - PerIndexOptions, - PutOptions, - PutParameters, - QueryOptions, - QueryParameters, - ScanOptions, - ScanParameters, - SecondaryIndexProjection, - UpdateOptions, - UpdateParameters, -} from './namedParameters'; -import { ParallelScanIterator } from './ParallelScanIterator'; -import { DynamoDbTable, getSchema, getTableName } from './protocols'; -import { QueryIterator } from './QueryIterator'; -import { ScanIterator } from './ScanIterator'; -import { - BatchGet, - BatchWrite, - PerTableOptions, - TableOptions, - WriteRequest, -} from '@aws/dynamodb-batch-iterator'; -import { - AttributeTypeMap, - getSchemaName, - isKey, - keysFromSchema, - KeyTypeMap, - marshallConditionExpression, - marshallItem, - marshallKey, - marshallUpdateExpression, - marshallValue, - PerIndexKeys, - Schema, - SchemaType, - toSchemaName, - unmarshallItem, - ZeroArgumentsConstructor, -} from '@aws/dynamodb-data-marshaller'; -import { - AttributePath, - AttributeValue, - ConditionExpression, - ConditionExpressionPredicate, - ExpressionAttributes, - FunctionExpression, - MathematicalExpression, - PathElement, - serializeProjectionExpression, - UpdateExpression, -} from '@aws/dynamodb-expressions'; -import { - AttributeDefinition, - AttributeMap, - CreateGlobalSecondaryIndexAction, - DeleteItemInput, - GetItemInput, - GlobalSecondaryIndexList, - KeySchemaElement, - LocalSecondaryIndexList, - Projection, - ProvisionedThroughput, - PutItemInput, - UpdateItemInput, -} from 'aws-sdk/clients/dynamodb'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -require('./asyncIteratorSymbolPolyfill'); - -/** - * Object mapper for domain object interaction with DynamoDB. - * - * To use, define a schema that describes how an item is represented in a - * DynamoDB table. This schema will be used to marshall a native JavaScript - * object into its desired persisted form. Attributes present on the object - * but not in the schema will be ignored. - */ -export class DataMapper { - private readonly client: DynamoDB; - private readonly readConsistency: ReadConsistency; - private readonly skipVersionCheck: boolean; - private readonly tableNamePrefix: string; - - constructor({ - client, - readConsistency = 'eventual', - skipVersionCheck = false, - tableNamePrefix = '' - }: DataMapperConfiguration) { - client.config.customUserAgent = ` dynamodb-data-mapper-js/${VERSION}`; - this.client = client; - this.readConsistency = readConsistency; - this.skipVersionCheck = skipVersionCheck; - this.tableNamePrefix = tableNamePrefix; - } - - /** - * Deletes items from DynamoDB in batches of 25 or fewer via one or more - * BatchWriteItem operations. The items may be from any number of tables; - * tables and schemas for each item are determined using the - * {DynamoDbSchema} property and the {DynamoDbTable} property on defined on - * each item supplied. - * - * This method will automatically retry any delete requests returned by - * DynamoDB as unprocessed. Exponential backoff on unprocessed items is - * employed on a per-table basis. - * - * @param items A synchronous or asynchronous iterable of items to delete. 
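     *
     * A minimal usage sketch, assuming `mapper` is a DataMapper instance and
     * `toRemove` is an iterable of objects annotated with the DynamoDbTable
     * and DynamoDbSchema symbols:
     *
     *     for await (const deleted of mapper.batchDelete(toRemove)) {
     *         // each yielded value is the unmarshalled item that was removed
     *     }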
- */ - async *batchDelete( - items: SyncOrAsyncIterable - ) { - const iter = this.batchWrite( - async function *mapToDelete(): AsyncIterable<['delete', T]> { - for await (const item of items) { - yield ['delete', item]; - } - }() - ); - - for await (const written of iter) { - yield written[1]; - } - } - - /** - * Retrieves items from DynamoDB in batches of 100 or fewer via one or more - * BatchGetItem operations. The items may be from any number of tables; - * tables and schemas for each item are determined using the - * {DynamoDbSchema} property and the {DynamoDbTable} property on defined on - * each item supplied. - * - * This method will automatically retry any get requests returned by - * DynamoDB as unprocessed. Exponential backoff on unprocessed items is - * employed on a per-table basis. - * - * @param items A synchronous or asynchronous iterable of items to get. - */ - async *batchGet( - items: SyncOrAsyncIterable, - { - readConsistency = this.readConsistency, - perTableOptions = {} - }: BatchGetOptions = {} - ) { - const state: BatchState = {}; - const options: PerTableOptions = {}; - - const batch = new BatchGet( - this.client, - this.mapGetBatch(items, state, perTableOptions, options), - { - ConsistentRead: readConsistency === 'strong' ? true : undefined, - PerTableOptions: options - } - ); - - for await (const [tableName, marshalled] of batch) { - const {keyProperties, itemSchemata} = state[tableName]; - const { - constructor, - schema, - } = itemSchemata[itemIdentifier(marshalled, keyProperties)]; - yield unmarshallItem(schema, marshalled, constructor); - } - } - - /** - * Puts items into DynamoDB in batches of 25 or fewer via one or more - * BatchWriteItem operations. The items may be from any number of tables; - * tables and schemas for each item are determined using the - * {DynamoDbSchema} property and the {DynamoDbTable} property on defined on - * each item supplied. - * - * This method will automatically retry any put requests returned by - * DynamoDB as unprocessed. Exponential backoff on unprocessed items is - * employed on a per-table basis. - * - * @param items A synchronous or asynchronous iterable of items to put. - */ - async *batchPut( - items: SyncOrAsyncIterable - ) { - const generator: SyncOrAsyncIterable<[WriteType, T]> = isIterable(items) - ? function *mapToPut() { - for (const item of items) { - yield ['put', item] as [WriteType, T]; - } - }() - : async function *mapToPut() { - for await (const item of items) { - yield ['put', item] as [WriteType, T]; - } - }(); - - for await (const written of this.batchWrite(generator)) { - yield written[1]; - } - } - - /** - * Puts or deletes items from DynamoDB in batches of 25 or fewer via one or - * more BatchWriteItem operations. The items may belong to any number of - * tables; tables and schemas for each item are determined using the - * {DynamoDbSchema} property and the {DynamoDbTable} property on defined on - * each item supplied. - * - * This method will automatically retry any write requests returned by - * DynamoDB as unprocessed. Exponential backoff on unprocessed items is - * employed on a per-table basis. - * - * @param items A synchronous or asynchronous iterable of tuples of the - * string 'put'|'delete' and the item on which to perform the specified - * write action. 
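     *
     * A minimal usage sketch, assuming a hypothetical annotated `User` class
     * with instances `userToSave` and `userToRemove`:
     *
     *     const writes: [WriteType, User][] = [
     *         ['put', userToSave],
     *         ['delete', userToRemove],
     *     ];
     *     for await (const [type, item] of mapper.batchWrite(writes)) {
     *         // `type` is 'put' or 'delete'; `item` is the unmarshalled value written
     *     }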
- */ - async *batchWrite( - items: SyncOrAsyncIterable<[WriteType, T]> - ): AsyncIterableIterator<[WriteType, T]> { - const state: BatchState = {}; - const batch = new BatchWrite( - this.client, - this.mapWriteBatch(items, state) - ); - - for await (const [tableName, {DeleteRequest, PutRequest}] of batch) { - const {keyProperties, itemSchemata} = state[tableName]; - const attributes = PutRequest - ? PutRequest.Item - : (DeleteRequest || {Key: {}}).Key - const { - constructor, - schema, - } = itemSchemata[itemIdentifier(attributes, keyProperties)]; - - yield [ - PutRequest ? 'put' : 'delete', - unmarshallItem(schema, attributes, constructor) - ]; - } - } - - /** - * Perform a CreateTable operation using the schema accessible via the - * {DynamoDbSchema} property and the table name accessible via the - * {DynamoDbTable} property on the prototype of the constructor supplied. - * - * The promise returned by this method will not resolve until the table is - * active and ready for use. - * - * @param valueConstructor The constructor used for values in the table. - * @param options Options to configure the CreateTable operation - */ - async createTable( - valueConstructor: ZeroArgumentsConstructor, - options: CreateTableOptions - ) { - const schema = getSchema(valueConstructor.prototype); - const { attributes, indexKeys, tableKeys } = keysFromSchema(schema); - const TableName = this.getTableName(valueConstructor.prototype); - - let throughput: { ProvisionedThroughput?: ProvisionedThroughput } = {}; - if (options.billingMode !== 'PAY_PER_REQUEST') { - throughput = { - ...provisionedThroughput(options.readCapacityUnits, options.writeCapacityUnits), - }; - } - - const { - streamViewType = 'NONE', - indexOptions = {}, - billingMode, - sseSpecification, - } = options; - - const { - TableDescription: {TableStatus} = {TableStatus: 'CREATING'} - } = await this.client.createTable({ - ...indexDefinitions(indexKeys, indexOptions, schema), - TableName, - ...throughput, - BillingMode: billingMode, - AttributeDefinitions: attributeDefinitionList(attributes), - KeySchema: keyTypesToElementList(tableKeys), - StreamSpecification: streamViewType === 'NONE' - ? { StreamEnabled: false } - : { StreamEnabled: true, StreamViewType: streamViewType }, - SSESpecification: sseSpecification - ? { - Enabled: true, - SSEType: sseSpecification.sseType, - KMSMasterKeyId: sseSpecification.kmsMasterKeyId, - } - : { Enabled: false }, - }).promise(); - - if (TableStatus !== 'ACTIVE') { - await this.client.waitFor('tableExists', {TableName}).promise(); - } - } - - /** - * Perform a UpdateTable operation using the schema accessible via the - * {DynamoDbSchema} property, the table name accessible via the - * {DynamoDbTable} property on the prototype of the constructor supplied, - * and the specified global secondary index name. - * - * The promise returned by this method will not resolve until the table is - * active and ready for use. - * - * @param valueConstructor The constructor used for values in the table. 
- * @param options Options to configure the UpdateTable operation - */ - async createGlobalSecondaryIndex( - valueConstructor: ZeroArgumentsConstructor, - indexName: string, - { - indexOptions = {}, - }: CreateTableOptions - ) { - const schema = getSchema(valueConstructor.prototype); - const { attributes, indexKeys } = keysFromSchema(schema); - const TableName = this.getTableName(valueConstructor.prototype); - - const globalSecondaryIndexes = indexDefinitions(indexKeys, indexOptions, schema).GlobalSecondaryIndexes; - const indexSearch = globalSecondaryIndexes === undefined ? [] : globalSecondaryIndexes.filter(function(index) { - return index.IndexName === indexName; - }); - const indexDefinition: CreateGlobalSecondaryIndexAction = indexSearch[0]; - - const { - TableDescription: {TableStatus} = {TableStatus: 'UPDATING'} - } = await this.client.updateTable({ - GlobalSecondaryIndexUpdates: [{ - Create: { - ...indexDefinition - } - }], - TableName, - AttributeDefinitions: attributeDefinitionList(attributes), - }).promise(); - - if (TableStatus !== 'ACTIVE') { - await this.client.waitFor('tableExists', {TableName}).promise(); - } - } - - /** - * If the index does not already exist, perform a UpdateTable operation - * using the schema accessible via the {DynamoDbSchema} property, the - * table name accessible via the {DynamoDbTable} property on the prototype - * of the constructor supplied, and the index name. - * - * The promise returned by this method will not resolve until the table is - * active and ready for use. Note that the index will not be usable for queries - * until it has finished backfilling - * - * @param valueConstructor The constructor used for values in the table. - * @param options Options to configure the UpdateTable operation - */ - async ensureGlobalSecondaryIndexExists( - valueConstructor: ZeroArgumentsConstructor, - indexName: string, - options: CreateTableOptions - ) { - const TableName = this.getTableName(valueConstructor.prototype); - try { - const { - Table: {GlobalSecondaryIndexes } = {GlobalSecondaryIndexes: []} - } = await this.client.describeTable({TableName}).promise(); - const indexSearch = GlobalSecondaryIndexes === undefined ? [] : GlobalSecondaryIndexes.filter(function(index) { - return index.IndexName === indexName; - }); - if (indexSearch.length === 0) { - await this.createGlobalSecondaryIndex(valueConstructor, indexName, options); - } - } catch (err) { - throw err; - } - } - - /** - * Perform a DeleteItem operation using the schema accessible via the - * {DynamoDbSchema} property and the table name accessible via the - * {DynamoDbTable} property on the item supplied. 
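     *
     * A minimal sketch, assuming `user` is an instance of an annotated class
     * with its key properties populated:
     *
     *     const removed = await mapper.delete(user);
     *     // `removed` holds the unmarshalled pre-deletion attributes, when returned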
- * - * @param item The item to delete - * @param options Options to configure the DeleteItem operation - */ - delete( - item: T, - options?: DeleteOptions - ): Promise; - - /** - * @deprecated - */ - delete( - parameters: DeleteParameters - ): Promise; - - async delete( - itemOrParameters: T|DeleteParameters, - options: DeleteOptions = {} - ): Promise { - let item: T; - if ( - 'item' in itemOrParameters && - (itemOrParameters as any).item[DynamoDbTable] - ) { - item = (itemOrParameters as DeleteParameters).item; - options = itemOrParameters as DeleteParameters; - } else { - item = itemOrParameters as T; - } - let { - condition, - returnValues = 'ALL_OLD', - skipVersionCheck = this.skipVersionCheck, - } = options; - - const schema = getSchema(item); - - const req: DeleteItemInput = { - TableName: this.getTableName(item), - Key: marshallKey(schema, item), - ReturnValues: returnValues, - }; - - if (!skipVersionCheck) { - for (const prop of Object.keys(schema)) { - let inputMember = item[prop]; - const fieldSchema = schema[prop]; - - if (isVersionAttribute(fieldSchema) && inputMember !== undefined) { - const {condition: versionCondition} = handleVersionAttribute( - prop, - inputMember - ); - - condition = condition - ? {type: 'And', conditions: [condition, versionCondition]} - : versionCondition; - } - } - } - - if (condition) { - const attributes = new ExpressionAttributes(); - req.ConditionExpression = marshallConditionExpression( - condition, - schema, - attributes - ).expression; - - if (Object.keys(attributes.names).length > 0) { - req.ExpressionAttributeNames = attributes.names; - } - - if (Object.keys(attributes.values).length > 0) { - req.ExpressionAttributeValues = attributes.values; - } - } - - const {Attributes} = await this.client.deleteItem(req).promise(); - if (Attributes) { - return unmarshallItem( - schema, - Attributes, - item.constructor as ZeroArgumentsConstructor - ); - } - } - - /** - * Perform a DeleteTable operation using the schema accessible via the - * {DynamoDbSchema} property and the table name accessible via the - * {DynamoDbTable} property on the prototype of the constructor supplied. - * - * The promise returned by this method will not resolve until the table is - * deleted and can no longer be used. - * - * @param valueConstructor The constructor used for values in the table. - */ - async deleteTable(valueConstructor: ZeroArgumentsConstructor) { - const TableName = this.getTableName(valueConstructor.prototype); - await this.client.deleteTable({TableName}).promise(); - await this.client.waitFor('tableNotExists', {TableName}).promise(); - } - - /** - * If the table does not already exist, perform a CreateTable operation - * using the schema accessible via the {DynamoDbSchema} property and the - * table name accessible via the {DynamoDbTable} property on the prototype - * of the constructor supplied. - * - * The promise returned by this method will not resolve until the table is - * active and ready for use. - * - * @param valueConstructor The constructor used for values in the table. 
- * @param options Options to configure the CreateTable operation - */ - async ensureTableExists( - valueConstructor: ZeroArgumentsConstructor, - options: CreateTableOptions - ) { - const TableName = this.getTableName(valueConstructor.prototype); - try { - const { - Table: {TableStatus} = {TableStatus: 'CREATING'} - } = await this.client.describeTable({TableName}).promise(); - - if (TableStatus !== 'ACTIVE') { - await this.client.waitFor('tableExists', {TableName}).promise(); - } - } catch (err) { - if (err.name === 'ResourceNotFoundException') { - await this.createTable(valueConstructor, options); - } else { - throw err; - } - } - } - - /** - * If the table exists, perform a DeleteTable operation using the schema - * accessible via the {DynamoDbSchema} property and the table name - * accessible via the {DynamoDbTable} property on the prototype of the - * constructor supplied. - * - * The promise returned by this method will not resolve until the table is - * deleted and can no longer be used. - * - * @param valueConstructor The constructor used for values in the table. - */ - async ensureTableNotExists( - valueConstructor: ZeroArgumentsConstructor - ) { - const TableName = this.getTableName(valueConstructor.prototype); - try { - const { - Table: {TableStatus: status} = {TableStatus: 'CREATING'} - } = await this.client.describeTable({TableName}).promise(); - - if (status === 'DELETING') { - await this.client.waitFor('tableNotExists', {TableName}) - .promise(); - return; - } else if (status === 'CREATING' || status === 'UPDATING') { - await this.client.waitFor('tableExists', {TableName}) - .promise(); - } - - await this.deleteTable(valueConstructor); - } catch (err) { - if (err.name !== 'ResourceNotFoundException') { - throw err; - } - } - } - - /** - * Perform a GetItem operation using the schema accessible via the - * {DynamoDbSchema} method and the table name accessible via the - * {DynamoDbTable} method on the item supplied. 
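     *
     * A minimal sketch, assuming a hypothetical annotated `User` class whose
     * constructor populates the hash key; a missing item causes the returned
     * promise to reject with an ItemNotFoundException:
     *
     *     const user = await mapper.get(new User('some-user-id'));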
- * - * @param item The item to get - * @param options Options to configure the GetItem operation - */ - get( - item: T, - options?: GetOptions - ): Promise; - - /** - * @deprecated - */ - get( - parameters: GetParameters - ): Promise; - - async get( - itemOrParameters: T|GetParameters, - options: GetOptions = {} - ): Promise { - let item: T; - if ( - 'item' in itemOrParameters && - (itemOrParameters as any).item[DynamoDbTable] - ) { - item = (itemOrParameters as GetParameters).item; - options = itemOrParameters as GetParameters; - } else { - item = itemOrParameters as T; - } - const { - projection, - readConsistency = this.readConsistency - } = options; - - const schema = getSchema(item); - const req: GetItemInput = { - TableName: this.getTableName(item), - Key: marshallKey(schema, item) - }; - - if (readConsistency === 'strong') { - req.ConsistentRead = true; - } - - if (projection) { - const attributes = new ExpressionAttributes(); - req.ProjectionExpression = serializeProjectionExpression( - projection.map(propName => toSchemaName(propName, schema)), - attributes - ); - - if (Object.keys(attributes.names).length > 0) { - req.ExpressionAttributeNames = attributes.names; - } - } - - const {Item} = await this.client.getItem(req).promise(); - if (Item) { - return unmarshallItem( - schema, - Item, - item.constructor as ZeroArgumentsConstructor - ); - } - - throw new ItemNotFoundException(req); - } - - /** - * Perform a Scan operation using the schema accessible via the - * {DynamoDbSchema} method and the table name accessible via the - * {DynamoDbTable} method on the prototype of the constructor supplied. - * - * This scan will be performed by multiple parallel workers, each of which - * will perform a sequential scan of a segment of the table or index. Use - * the `segments` parameter to specify the number of workers to be used. - * - * @param valueConstructor The constructor to be used for each item - * returned by the scan - * @param segments The number of parallel workers to use to perform - * the scan - * @param options Options to configure the Scan operation - * - * @return An asynchronous iterator that yields scan results. Intended - * to be consumed with a `for await ... of` loop. - */ - parallelScan( - valueConstructor: ZeroArgumentsConstructor, - segments: number, - options?: ParallelScanOptions - ): ParallelScanIterator; - - /** - * @deprecated - */ - parallelScan( - parameters: ParallelScanParameters - ): ParallelScanIterator; - - parallelScan( - ctorOrParams: ZeroArgumentsConstructor|ParallelScanParameters, - segments?: number, - options: ParallelScanOptions = {} - ): ParallelScanIterator { - let valueConstructor: ZeroArgumentsConstructor; - if (typeof segments !== 'number') { - valueConstructor = (ctorOrParams as ParallelScanParameters).valueConstructor; - segments = (ctorOrParams as ParallelScanParameters).segments; - options = ctorOrParams as ParallelScanParameters; - } else { - valueConstructor = ctorOrParams as ZeroArgumentsConstructor; - } - - return new ParallelScanIterator( - this.client, - valueConstructor, - segments, - { - readConsistency: this.readConsistency, - ...options, - tableNamePrefix: this.tableNamePrefix, - } - ); - } - - /** - * Perform a PutItem operation using the schema accessible via the - * {DynamoDbSchema} method and the table name accessible via the - * {DynamoDbTable} method on the item supplied. 
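A sketch of loading a single item and of consuming a parallel scan, under the same assumptions (hypothetical `Post` class, `Posts` table, illustrative region). Note that `get` rejects with `ItemNotFoundException` rather than resolving with `undefined` when no item matches the key.

```typescript
import { DataMapper, DynamoDbSchema, DynamoDbTable } from '@aws/dynamodb-data-mapper';
import DynamoDB = require('aws-sdk/clients/dynamodb');

// Hypothetical domain class used only for illustration.
class Post {
    id?: string;
    title?: string;
}

Object.defineProperties(Post.prototype, {
    [DynamoDbTable]: {value: 'Posts'},
    [DynamoDbSchema]: {
        value: {
            id: {type: 'String', keyType: 'HASH'},
            title: {type: 'String'},
        },
    },
});

const mapper = new DataMapper({client: new DynamoDB({region: 'us-west-2'})});

async function loadPost(id: string): Promise<Post|undefined> {
    try {
        // GetItem with a strongly consistent read, fetching only `title`.
        return await mapper.get(Object.assign(new Post(), {id}), {
            readConsistency: 'strong',
            projection: ['title'],
        });
    } catch (err) {
        if (err instanceof Error && err.name === 'ItemNotFoundException') {
            return undefined;
        }
        throw err;
    }
}

async function scanAllInParallel() {
    // Four workers, each sequentially scanning one segment of the table.
    for await (const post of mapper.parallelScan(Post, 4)) {
        console.log(post.id, post.title);
    }
}
```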
- * - * @param item The item to save to DynamoDB - * @param options Options to configure the PutItem operation - */ - put( - item: T, - options?: PutOptions - ): Promise; - - /** - * @deprecated - */ - put( - parameters: PutParameters - ): Promise; - - async put( - itemOrParameters: T|PutParameters, - options: PutOptions = {} - ): Promise { - let item: T; - if ( - 'item' in itemOrParameters && - (itemOrParameters as any).item[DynamoDbTable] - ) { - item = (itemOrParameters as PutParameters).item; - options = itemOrParameters as PutParameters; - } else { - item = itemOrParameters as T; - } - let { - condition, - skipVersionCheck = this.skipVersionCheck, - } = options; - - const schema = getSchema(item); - const req: PutItemInput = { - TableName: this.getTableName(item), - Item: marshallItem(schema, item), - }; - - if (!skipVersionCheck) { - for (const key of Object.keys(schema)) { - let inputMember = item[key]; - const fieldSchema = schema[key]; - const {attributeName = key} = fieldSchema; - - if (isVersionAttribute(fieldSchema)) { - const {condition: versionCond} = handleVersionAttribute( - key, - inputMember - ); - if (req.Item[attributeName]) { - req.Item[attributeName].N = ( - Number(req.Item[attributeName].N) + 1 - ).toString(); - } else { - req.Item[attributeName] = {N: "0"}; - } - - condition = condition - ? {type: 'And', conditions: [condition, versionCond]} - : versionCond; - } - } - } - - if (condition) { - const attributes = new ExpressionAttributes(); - req.ConditionExpression = marshallConditionExpression( - condition, - schema, - attributes - ).expression; - - if (Object.keys(attributes.names).length > 0) { - req.ExpressionAttributeNames = attributes.names; - } - - if (Object.keys(attributes.values).length > 0) { - req.ExpressionAttributeValues = attributes.values; - } - } - - await this.client.putItem(req).promise(); - - return unmarshallItem( - schema, - req.Item, - item.constructor as ZeroArgumentsConstructor - ); - } - - /** - * Perform a Query operation using the schema accessible via the - * {DynamoDbSchema} method and the table name accessible via the - * {DynamoDbTable} method on the prototype of the constructor supplied. - * - * @param valueConstructor The constructor to use for each query result. - * @param keyCondition A condition identifying a particular hash key - * value. - * @param options Additional options for customizing the Query - * operation - * - * @return An asynchronous iterator that yields query results. Intended - * to be consumed with a `for await ... of` loop. 
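The version handling above can be seen end to end with a hypothetical versioned class; the schema marks `version` as the version attribute, and the names and values below are illustrative only.

```typescript
import { DataMapper, DynamoDbSchema, DynamoDbTable } from '@aws/dynamodb-data-mapper';
import DynamoDB = require('aws-sdk/clients/dynamodb');

// Hypothetical domain class with a version attribute.
class Post {
    id?: string;
    title?: string;
    version?: number;
}

Object.defineProperties(Post.prototype, {
    [DynamoDbTable]: {value: 'Posts'},
    [DynamoDbSchema]: {
        value: {
            id: {type: 'String', keyType: 'HASH'},
            title: {type: 'String'},
            version: {type: 'Number', versionAttribute: true},
        },
    },
});

const mapper = new DataMapper({client: new DynamoDB({region: 'us-west-2'})});

async function putSketch() {
    // First save: the version attribute is initialized to 0 and the write is
    // conditioned on that attribute not already existing.
    let post = await mapper.put(Object.assign(new Post(), {
        id: 'post-1',
        title: 'Hello, world',
    }));
    console.log(post.version); // 0

    // Subsequent saves succeed only while the stored version still matches
    // the local one, and the version is incremented as part of the write.
    post.title = 'Hello again';
    post = await mapper.put(post);
    console.log(post.version); // 1
}
```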
- */ - query( - valueConstructor: ZeroArgumentsConstructor, - keyCondition: ConditionExpression | - {[propertyName: string]: ConditionExpressionPredicate|any}, - options?: QueryOptions - ): QueryIterator; - - /** - * @deprecated - * - * @param parameters Named parameter object - */ - query( - parameters: QueryParameters - ): QueryIterator; - - query( - valueConstructorOrParameters: ZeroArgumentsConstructor|QueryParameters, - keyCondition?: ConditionExpression | - {[propertyName: string]: ConditionExpressionPredicate|any}, - options: QueryOptions = {} - ) { - let valueConstructor: ZeroArgumentsConstructor; - if (!keyCondition) { - valueConstructor = (valueConstructorOrParameters as QueryParameters).valueConstructor; - keyCondition = (valueConstructorOrParameters as QueryParameters).keyCondition; - options = (valueConstructorOrParameters as QueryParameters); - } else { - valueConstructor = valueConstructorOrParameters as ZeroArgumentsConstructor; - } - - return new QueryIterator( - this.client, - valueConstructor, - keyCondition, - { - readConsistency: this.readConsistency, - ...options, - tableNamePrefix: this.tableNamePrefix, - } - ); - } - - /** - * Perform a Scan operation using the schema accessible via the - * {DynamoDbSchema} method and the table name accessible via the - * {DynamoDbTable} method on the prototype of the constructor supplied. - * - * @param valueConstructor The constructor to use for each item returned by - * the Scan operation. - * @param options Additional options for customizing the Scan - * operation - * - * @return An asynchronous iterator that yields scan results. Intended - * to be consumed with a `for await ... of` loop. - */ - scan( - valueConstructor: ZeroArgumentsConstructor, - options?: ScanOptions|ParallelScanWorkerOptions - ): ScanIterator; - - /** - * @deprecated - */ - scan( - parameters: ScanParameters|ParallelScanWorkerParameters - ): ScanIterator; - - scan( - ctorOrParams: ZeroArgumentsConstructor | - ScanParameters | - ParallelScanWorkerParameters, - options: ScanOptions|ParallelScanWorkerOptions = {} - ): ScanIterator { - let valueConstructor: ZeroArgumentsConstructor; - if ( - 'valueConstructor' in ctorOrParams && - (ctorOrParams as ScanParameters).valueConstructor.prototype && - (ctorOrParams as any).valueConstructor.prototype[DynamoDbTable] - ) { - valueConstructor = (ctorOrParams as ScanParameters).valueConstructor; - options = ctorOrParams as ScanParameters; - } else { - valueConstructor = ctorOrParams as ZeroArgumentsConstructor; - } - - return new ScanIterator( - this.client, - valueConstructor, - { - readConsistency: this.readConsistency, - ...options, - tableNamePrefix: this.tableNamePrefix, - } - ); - } - - /** - * Perform an UpdateItem operation using the schema accessible via the - * {DynamoDbSchema} method and the table name accessible via the - * {DynamoDbTable} method on the item supplied. 
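A sketch of driving the query and scan iterators with `for await ... of`, assuming the same hypothetical `Post` class keyed on `id`:

```typescript
import { DataMapper, DynamoDbSchema, DynamoDbTable } from '@aws/dynamodb-data-mapper';
import DynamoDB = require('aws-sdk/clients/dynamodb');

// Hypothetical domain class used only for illustration.
class Post {
    id?: string;
    title?: string;
}

Object.defineProperties(Post.prototype, {
    [DynamoDbTable]: {value: 'Posts'},
    [DynamoDbSchema]: {
        value: {
            id: {type: 'String', keyType: 'HASH'},
            title: {type: 'String'},
        },
    },
});

const mapper = new DataMapper({client: new DynamoDB({region: 'us-west-2'})});

async function querySketch() {
    // The shorthand `{propertyName: value}` key condition is treated as an
    // equality condition on that property.
    for await (const post of mapper.query(Post, {id: 'post-1'}, {limit: 10})) {
        console.log(post.title);
    }

    // Sequential scan with a server-side filter expression.
    const matches = mapper.scan(Post, {
        filter: {type: 'Equals', subject: 'title', object: 'Hello, world'},
        pageSize: 50,
    });
    for await (const post of matches) {
        console.log(post.id);
    }
}
```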
- * - * @param item The item to save to DynamoDB - * @param options Options to configure the UpdateItem operation - */ - update( - item: T, - options?: UpdateOptions - ): Promise; - - /** - * @deprecated - */ - update( - parameters: UpdateParameters - ): Promise; - - async update( - itemOrParameters: T|UpdateParameters, - options: UpdateOptions = {} - ): Promise { - let item: T; - if ( - 'item' in itemOrParameters && - (itemOrParameters as any).item[DynamoDbTable] - ) { - item = (itemOrParameters as UpdateParameters).item; - options = itemOrParameters as UpdateParameters; - } else { - item = itemOrParameters as T; - } - let { - condition, - onMissing = 'remove', - skipVersionCheck = this.skipVersionCheck, - } = options; - - const schema = getSchema(item); - const expr = new UpdateExpression(); - const itemKey: {[propertyName: string]: any} = {}; - - for (const key of Object.keys(schema)) { - let inputMember = item[key]; - const fieldSchema = schema[key]; - - if (isKey(fieldSchema)) { - itemKey[key] = inputMember; - } else if (isVersionAttribute(fieldSchema)) { - const {condition: versionCond, value} = handleVersionAttribute( - key, - inputMember - ); - expr.set(key, value); - - if (!skipVersionCheck) { - condition = condition - ? {type: 'And', conditions: [condition, versionCond]} - : versionCond; - } - } else if (inputMember === undefined) { - if (onMissing === 'remove') { - expr.remove(key); - } - } else { - const marshalled = marshallValue(fieldSchema, inputMember); - if (marshalled) { - expr.set(key, new AttributeValue(marshalled)); - } - } - } - - return this.doExecuteUpdateExpression( - expr, - itemKey, - getSchema(item), - getTableName(item), - item.constructor as ZeroArgumentsConstructor, - {condition} - ); - } - - /** - * Execute a custom update expression using the schema and table name - * defined on the provided `valueConstructor`. - * - * This method does not support automatic version checking, as the current - * state of a table's version attribute cannot be inferred from an update - * expression object. To perform a version check manually, add a condition - * expression: - * - * ```typescript - * const currentVersion = 1; - * updateExpression.set('nameOfVersionAttribute', currentVersion + 1); - * const condition = { - * type: 'Equals', - * subject: 'nameOfVersionAttribute', - * object: currentVersion - * }; - * - * const updated = await mapper.executeUpdateExpression( - * updateExpression, - * itemKey, - * constructor, - * {condition} - * ); - * ``` - * - * **NB:** Property names and attribute paths in the update expression - * should reflect the names used in the schema. - * - * @param expression The update expression to execute. - * @param key The full key to identify the object being - * updated. - * @param valueConstructor The constructor with which to map the result to - * a domain object. - * @param options Options with which to customize the UpdateItem - * request. - * - * @returns The updated item. 
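A sketch of the two update paths, assuming the same hypothetical un-versioned `Post` class: `update` maps the supplied item through the schema, while `executeUpdateExpression` runs a hand-built expression keyed directly by the table key and applies no automatic version check.

```typescript
import { DataMapper, DynamoDbSchema, DynamoDbTable } from '@aws/dynamodb-data-mapper';
import { UpdateExpression } from '@aws/dynamodb-expressions';
import DynamoDB = require('aws-sdk/clients/dynamodb');

// Hypothetical domain class used only for illustration.
class Post {
    id?: string;
    title?: string;
    summary?: string;
}

Object.defineProperties(Post.prototype, {
    [DynamoDbTable]: {value: 'Posts'},
    [DynamoDbSchema]: {
        value: {
            id: {type: 'String', keyType: 'HASH'},
            title: {type: 'String'},
            summary: {type: 'String'},
        },
    },
});

const mapper = new DataMapper({client: new DynamoDB({region: 'us-west-2'})});

async function updateSketch() {
    // With onMissing 'skip', attributes left undefined on the item (here,
    // `summary`) are left untouched instead of being removed.
    const updated = await mapper.update(
        Object.assign(new Post(), {id: 'post-1', title: 'Renamed'}),
        {onMissing: 'skip'}
    );
    console.log(updated.title);

    // Custom update expression; property names are mapped through the schema.
    const expr = new UpdateExpression();
    expr.set('summary', 'A short synopsis');
    const result = await mapper.executeUpdateExpression(expr, {id: 'post-1'}, Post);
    console.log(result.summary);
}
```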
- */ - async executeUpdateExpression< - T extends StringToAnyObjectMap = StringToAnyObjectMap - >( - expression: UpdateExpression, - key: {[propertyName: string]: any}, - valueConstructor: ZeroArgumentsConstructor, - options: ExecuteUpdateExpressionOptions = {} - ): Promise { - return this.doExecuteUpdateExpression( - expression, - key, - getSchema(valueConstructor.prototype), - getTableName(valueConstructor.prototype), - valueConstructor, - options - ); - } - - private async doExecuteUpdateExpression< - T extends StringToAnyObjectMap = StringToAnyObjectMap - >( - expression: UpdateExpression, - key: {[propertyName: string]: any}, - schema: Schema, - tableName: string, - valueConstructor?: ZeroArgumentsConstructor, - options: ExecuteUpdateExpressionOptions = {} - ): Promise { - const req: UpdateItemInput = { - TableName: this.tableNamePrefix + tableName, - ReturnValues: 'ALL_NEW', - Key: marshallKey(schema, key), - }; - - const attributes = new ExpressionAttributes(); - - if (options.condition) { - req.ConditionExpression = marshallConditionExpression( - options.condition, - schema, - attributes - ).expression; - } - - req.UpdateExpression = marshallUpdateExpression( - expression, - schema, - attributes - ).expression; - - if (Object.keys(attributes.names).length > 0) { - req.ExpressionAttributeNames = attributes.names; - } - - if (Object.keys(attributes.values).length > 0) { - req.ExpressionAttributeValues = attributes.values; - } - - const rawResponse = await this.client.updateItem(req).promise(); - if (rawResponse.Attributes) { - return unmarshallItem(schema, rawResponse.Attributes, valueConstructor); - } - - // this branch should not be reached when interacting with DynamoDB, as - // the ReturnValues parameter is hardcoded to 'ALL_NEW' above. It is, - // however, allowed by the service model and may therefore occur in - // certain unforeseen conditions; to be safe, this case should be - // converted into an error unless a compelling reason to return - // undefined or an empty object presents itself. 
- throw new Error( - 'Update operation completed successfully, but the updated value was not returned' - ); - } - - private getTableName(item: StringToAnyObjectMap): string { - return getTableName(item, this.tableNamePrefix); - } - - private async *mapGetBatch( - items: SyncOrAsyncIterable, - state: BatchState, - options: {[tableName: string]: BatchGetTableOptions}, - convertedOptions: PerTableOptions - ): AsyncIterableIterator<[string, AttributeMap]> { - for await (const item of items) { - const unprefixed = getTableName(item); - const tableName = this.tableNamePrefix + unprefixed; - const schema = getSchema(item); - - if (unprefixed in options && !(tableName in convertedOptions)) { - convertedOptions[tableName] = convertBatchGetOptions( - options[unprefixed], - schema - ); - } - - if (!(tableName in state)) { - state[tableName] = { - keyProperties: getKeyProperties(schema), - itemSchemata: {} - }; - } - - const {keyProperties, itemSchemata} = state[tableName]; - const marshalled = marshallKey(schema, item); - itemSchemata[itemIdentifier(marshalled, keyProperties)] = { - constructor: item.constructor as ZeroArgumentsConstructor, - schema, - }; - - yield [tableName, marshalled]; - } - } - - private async *mapWriteBatch( - items: SyncOrAsyncIterable<[WriteType, T]>, - state: BatchState - ): AsyncIterableIterator<[string, WriteRequest]> { - for await (const [type, item] of items) { - const unprefixed = getTableName(item); - const tableName = this.tableNamePrefix + unprefixed; - const schema = getSchema(item); - - if (!(tableName in state)) { - state[tableName] = { - keyProperties: getKeyProperties(schema), - itemSchemata: {} - }; - } - - const {keyProperties, itemSchemata} = state[tableName]; - const attributes = type === 'delete' - ? marshallKey(schema, item) - : marshallItem(schema, item); - const marshalled = type === 'delete' - ? 
{DeleteRequest: {Key: attributes}} - : {PutRequest: {Item: attributes}} - itemSchemata[itemIdentifier(attributes, keyProperties)] = { - constructor: item.constructor as ZeroArgumentsConstructor, - schema, - }; - - yield [tableName, marshalled]; - } - } -} - -function attributeDefinitionList( - attributes: AttributeTypeMap -): Array { - return Object.keys(attributes).map(name => ({ - AttributeName: name, - AttributeType: attributes[name] - })); -} - -function convertBatchGetOptions( - options: BatchGetTableOptions, - itemSchema: Schema -): TableOptions { - const out: TableOptions = {}; - - if (options.readConsistency === 'strong') { - out.ConsistentRead = true; - } - - if (options.projection) { - const attributes = new ExpressionAttributes(); - out.ProjectionExpression = serializeProjectionExpression( - options.projection.map( - propName => toSchemaName( - propName, - options.projectionSchema || itemSchema - ) - ), - attributes - ); - out.ExpressionAttributeNames = attributes.names; - } - - return out; -} - -function getKeyProperties(schema: Schema): Array { - const keys: Array = []; - for (const property of Object.keys(schema).sort()) { - const fieldSchema = schema[property]; - if (isKey(fieldSchema)) { - keys.push(fieldSchema.attributeName || property); - } - } - - return keys; -} - -function handleVersionAttribute( - attributeName: string, - inputMember: any, -): {condition: ConditionExpression, value: MathematicalExpression|AttributeValue} { - let condition: ConditionExpression; - let value: any; - if (inputMember === undefined) { - condition = new FunctionExpression( - 'attribute_not_exists', - new AttributePath([ - {type: 'AttributeName', name: attributeName} as PathElement - ]) - ); - value = new AttributeValue({N: "0"}); - } else { - condition = { - type: 'Equals', - subject: attributeName, - object: inputMember, - }; - value = new MathematicalExpression( - new AttributePath(attributeName), - '+', - 1 - ); - } - - return {condition, value}; -} - -function indexDefinitions( - keys: PerIndexKeys, - options: PerIndexOptions, - schema: Schema -): { - GlobalSecondaryIndexes?: GlobalSecondaryIndexList; - LocalSecondaryIndexes?: LocalSecondaryIndexList; -} { - const globalIndices: GlobalSecondaryIndexList = []; - const localIndices: LocalSecondaryIndexList = []; - - for (const IndexName of Object.keys(keys)) { - const KeySchema = keyTypesToElementList(keys[IndexName]); - const indexOptions = options[IndexName]; - if (!indexOptions) { - throw new Error(`No options provided for ${IndexName} index`); - } - - const indexInfo = { - IndexName, - KeySchema, - Projection: indexProjection(schema, indexOptions.projection), - }; - if (indexOptions.type === 'local') { - localIndices.push(indexInfo); - } else { - globalIndices.push({ - ...indexInfo, - ...provisionedThroughput(indexOptions.readCapacityUnits, indexOptions.writeCapacityUnits), - }); - } - } - - return { - GlobalSecondaryIndexes: globalIndices.length ? globalIndices : void 0, - LocalSecondaryIndexes: localIndices.length ? localIndices : void 0, - }; -} - -function indexProjection( - schema: Schema, - projection: SecondaryIndexProjection -): Projection { - if (typeof projection === 'string') { - return { - ProjectionType: projection === 'all' ? 
'ALL' : 'KEYS_ONLY', - } - } - - return { - ProjectionType: 'INCLUDE', - NonKeyAttributes: projection.map(propName => getSchemaName(propName, schema)) - }; -} - -function isIterable(arg: any): arg is Iterable { - return Boolean(arg) && typeof arg[Symbol.iterator] === 'function'; -} - -function isVersionAttribute(fieldSchema: SchemaType): boolean { - return fieldSchema.type === 'Number' - && Boolean(fieldSchema.versionAttribute); -} - -function itemIdentifier( - marshalled: AttributeMap, - keyProperties: Array -): string { - const keyAttributes: Array = []; - for (const key of keyProperties) { - const value = marshalled[key]; - keyAttributes.push(`${key}=${value.B || value.N || value.S}`); - } - - return keyAttributes.join(':'); -} - -function keyTypesToElementList(keys: KeyTypeMap): Array { - const elementList = Object.keys(keys).map(name => ({ - AttributeName: name, - KeyType: keys[name] - })); - - elementList.sort((a, b) => { - if (a.KeyType === 'HASH' && b.KeyType !== 'HASH') { - return -1; - } - if (a.KeyType !== 'HASH' && b.KeyType === 'HASH') { - return 1; - } - return 0; - }); - - return elementList; -} - -function provisionedThroughput( - readCapacityUnits?: number, - writeCapacityUnits?: number -): { - ProvisionedThroughput?: ProvisionedThroughput -} { - let capacityUnits; - if (typeof readCapacityUnits === 'number' && typeof writeCapacityUnits === 'number') { - capacityUnits = { - ReadCapacityUnits: readCapacityUnits, - WriteCapacityUnits: writeCapacityUnits, - }; - } - - return { - ...(capacityUnits && { ProvisionedThroughput: capacityUnits }) - }; -} - diff --git a/packages/dynamodb-data-mapper/src/ItemNotFoundException.spec.ts b/packages/dynamodb-data-mapper/src/ItemNotFoundException.spec.ts deleted file mode 100644 index b3ebc783..00000000 --- a/packages/dynamodb-data-mapper/src/ItemNotFoundException.spec.ts +++ /dev/null @@ -1,32 +0,0 @@ -import {ItemNotFoundException} from "./ItemNotFoundException"; -import {GetItemInput} from "aws-sdk/clients/dynamodb"; - -describe('ItemNotFoundException', () => { - it('should include the request sent as part of the error', () => { - const getItemInput: GetItemInput = { - TableName: 'foo', - Key: { - fizz: {S: 'buzz'}, - }, - }; - - const exception = new ItemNotFoundException(getItemInput, 'message'); - expect(exception.message).toBe('message'); - expect(exception.itemSought).toBe(getItemInput); - }); - - it('should identify itself by name', () => { - const exception = new ItemNotFoundException({} as any, 'message'); - expect(exception.name).toBe('ItemNotFoundException'); - }); - - it( - 'should construct a default message from the item sought if no message supplied', - () => { - const exception = new ItemNotFoundException({Key: {foo: {S: "bar"}}, TableName: "MyTable"}); - expect(exception.message).toBe( - 'No item with the key {"foo":{"S":"bar"}} found in the MyTable table.' - ); - } - ); -}); diff --git a/packages/dynamodb-data-mapper/src/ItemNotFoundException.ts b/packages/dynamodb-data-mapper/src/ItemNotFoundException.ts deleted file mode 100644 index 57d0bbb1..00000000 --- a/packages/dynamodb-data-mapper/src/ItemNotFoundException.ts +++ /dev/null @@ -1,23 +0,0 @@ -import {GetItemInput} from "aws-sdk/clients/dynamodb"; - -/** - * An exception thrown when an item was sought with a DynamoDB::GetItem - * request and not found. Includes the original request sent as - * `itemSought`. 
- */ -export class ItemNotFoundException extends Error { - readonly name = 'ItemNotFoundException'; - - constructor( - public readonly itemSought: GetItemInput, - message: string = defaultErrorMessage(itemSought) - ) { - super(message); - } -} - -function defaultErrorMessage(itemSought: GetItemInput): string { - return `No item with the key ${ - JSON.stringify(itemSought.Key) - } found in the ${itemSought.TableName} table.`; -} diff --git a/packages/dynamodb-data-mapper/src/Iterator.ts b/packages/dynamodb-data-mapper/src/Iterator.ts deleted file mode 100644 index be36ace8..00000000 --- a/packages/dynamodb-data-mapper/src/Iterator.ts +++ /dev/null @@ -1,112 +0,0 @@ -import { Paginator as AbstractPaginator } from './Paginator'; -import { ConsumedCapacity } from 'aws-sdk/clients/dynamodb'; - -require('./asyncIteratorSymbolPolyfill'); - -export abstract class Iterator< - T, - Paginator extends AbstractPaginator -> implements AsyncIterableIterator { - private _count = 0; - private lastResolved: Promise> = Promise.resolve() as any; - private readonly pending: Array = []; - - protected lastYielded?: T; - - protected constructor(private readonly paginator: Paginator) {} - - /** - * @inheritDoc - */ - [Symbol.asyncIterator]() { - return this; - } - - /** - * @inheritDoc - */ - next(): Promise> { - this.lastResolved = this.lastResolved.then(() => this.getNext()); - return this.lastResolved; - } - - /** - * Detaches the underlying paginator from this iterator and returns it. The - * paginator will yield arrays of unmarshalled items, with each yielded - * array corresponding to a single call to the underlying API. As with the - * underlying API, pages may contain a variable number of items or no items, - * in which case an empty array will be yielded. - * - * Calling this method will disable further iteration. - */ - pages(): Paginator { - // Prevent the iterator from being used further and squelch any uncaught - // promise rejection warnings - this.lastResolved = Promise.reject(new Error( - 'The underlying paginator has been detached from this iterator.' - )); - this.lastResolved.catch(() => {}); - - return this.paginator; - } - - /** - * @inheritDoc - */ - return(): Promise> { - // Prevent any further use of this iterator - this.lastResolved = Promise.reject(new Error( - 'Iteration has been manually interrupted and may not be resumed' - )); - this.lastResolved.catch(() => {}); - - // Empty the pending queue to free up memory - this.pending.length = 0; - return this.paginator.return() as any; - } - - /** - * Retrieve the reported capacity consumed by this iterator. Will be - * undefined unless returned consumed capacity is requested. - */ - get consumedCapacity(): ConsumedCapacity|undefined { - return this.paginator.consumedCapacity; - } - - /** - * Retrieve the number of items yielded thus far by this iterator. - */ - get count() { - return this._count; - } - - /** - * Retrieve the number of items scanned thus far during the execution of - * this iterator. This number should be the same as {@link count} unless a - * filter expression was used. 
- */ - get scannedCount() { - return this.paginator.scannedCount; - } - - private async getNext(): Promise> { - if (this.pending.length > 0) { - this.lastYielded = this.pending.shift()!; - this._count++; - return { - done: false, - value: this.lastYielded - } - } - - return this.paginator.next().then(({value = [], done}) => { - if (!done) { - this.pending.push(...value); - return this.getNext(); - } - - this.lastYielded = undefined; - return {done: true} as IteratorResult; - }); - } -} diff --git a/packages/dynamodb-data-mapper/src/Paginator.ts b/packages/dynamodb-data-mapper/src/Paginator.ts deleted file mode 100644 index 81ad9757..00000000 --- a/packages/dynamodb-data-mapper/src/Paginator.ts +++ /dev/null @@ -1,106 +0,0 @@ -import { getSchema } from './protocols'; -import { DynamoDbPaginatorInterface } from '@aws/dynamodb-query-iterator'; -import { - Schema, - unmarshallItem, - ZeroArgumentsConstructor, -} from '@aws/dynamodb-data-marshaller'; -import { ConsumedCapacity } from 'aws-sdk/clients/dynamodb'; - -require('./asyncIteratorSymbolPolyfill'); - -export abstract class Paginator implements AsyncIterableIterator> { - private readonly itemSchema: Schema; - private lastKey?: T; - private lastResolved: Promise>> = Promise.resolve() as any; - - protected constructor( - private readonly paginator: DynamoDbPaginatorInterface, - private readonly valueConstructor: ZeroArgumentsConstructor - ) { - this.itemSchema = getSchema(valueConstructor.prototype); - } - - /** - * @inheritDoc - */ - [Symbol.asyncIterator]() { - return this; - } - - /** - * @inheritDoc - */ - next(): Promise>> { - this.lastResolved = this.lastResolved.then(() => this.getNext()); - return this.lastResolved; - } - - /** - * @inheritDoc - */ - return(): Promise>> { - // Prevent any further use of this iterator - this.lastResolved = Promise.reject(new Error( - 'Iteration has been manually interrupted and may not be resumed' - )); - this.lastResolved.catch(() => {}); - - return this.paginator.return() as any; - } - - /** - * Retrieve the reported capacity consumed by this paginator. Will be - * undefined unless returned consumed capacity is requested. - */ - get consumedCapacity(): ConsumedCapacity|undefined { - return this.paginator.consumedCapacity; - } - - /** - * Retrieve the number of items yielded thus far by this paginator. - */ - get count() { - return this.paginator.count; - } - - /** - * Retrieve the last reported `LastEvaluatedKey`, unmarshalled according to - * the schema used by this paginator. - */ - get lastEvaluatedKey(): Partial|undefined { - return this.lastKey; - } - - /** - * Retrieve the number of items scanned thus far during the execution of - * this paginator. This number should be the same as {@link count} unless a - * filter expression was used. 
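A brief sketch of the iterator surface described above, again with a hypothetical `Post` class: items can be consumed one at a time, or the underlying paginator can be detached with `pages()` to work a page at a time.

```typescript
import { DataMapper, DynamoDbSchema, DynamoDbTable } from '@aws/dynamodb-data-mapper';
import DynamoDB = require('aws-sdk/clients/dynamodb');

// Hypothetical domain class used only for illustration.
class Post {
    id?: string;
}

Object.defineProperties(Post.prototype, {
    [DynamoDbTable]: {value: 'Posts'},
    [DynamoDbSchema]: {
        value: {id: {type: 'String', keyType: 'HASH'}},
    },
});

const mapper = new DataMapper({client: new DynamoDB({region: 'us-west-2'})});

async function iterationSketch() {
    // Item-by-item iteration; `count` reflects how many items were yielded.
    const items = mapper.scan(Post);
    for await (const post of items) {
        console.log(post.id);
    }
    console.log(items.count, items.scannedCount);

    // Page-by-page iteration; calling pages() detaches the paginator, so the
    // original iterator may not be used afterwards.
    for await (const page of mapper.scan(Post).pages()) {
        console.log(`received a page of ${page.length} item(s)`);
    }
}
```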
- */ - get scannedCount() { - return this.paginator.scannedCount; - } - - private async getNext(): Promise>> { - return this.paginator.next().then(({value = {}, done}) => { - if (!done) { - this.lastKey = value.LastEvaluatedKey && unmarshallItem( - this.itemSchema, - value.LastEvaluatedKey, - this.valueConstructor - ); - - return { - value: (value.Items || []).map(item => unmarshallItem( - this.itemSchema, - item, - this.valueConstructor - )), - done: false - }; - } - - return {done: true} as IteratorResult>; - }); - } -} diff --git a/packages/dynamodb-data-mapper/src/ParallelScanIterator.ts b/packages/dynamodb-data-mapper/src/ParallelScanIterator.ts deleted file mode 100644 index b329e8c4..00000000 --- a/packages/dynamodb-data-mapper/src/ParallelScanIterator.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { Iterator } from './Iterator'; -import { ParallelScanOptions } from './namedParameters'; -import { ParallelScanPaginator } from './ParallelScanPaginator'; -import { ZeroArgumentsConstructor } from '@aws/dynamodb-data-marshaller'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -/** - * Iterates over each item returned by a parallel DynamoDB scan until no more - * pages are available. - */ -export class ParallelScanIterator extends - Iterator> -{ - constructor( - client: DynamoDB, - itemConstructor: ZeroArgumentsConstructor, - segments: number, - options: ParallelScanOptions & { tableNamePrefix?: string } = {} - ) { - super(new ParallelScanPaginator( - client, - itemConstructor, - segments, - options - )); - } -} diff --git a/packages/dynamodb-data-mapper/src/ParallelScanPaginator.ts b/packages/dynamodb-data-mapper/src/ParallelScanPaginator.ts deleted file mode 100644 index bb38e5b7..00000000 --- a/packages/dynamodb-data-mapper/src/ParallelScanPaginator.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { buildScanInput } from './buildScanInput'; -import { - ParallelScanOptions, - ParallelScanState, - ScanState, -} from './namedParameters'; -import { Paginator } from './Paginator'; -import { getSchema } from './protocols'; -import { - ParallelScanInput, - ParallelScanPaginator as BasePaginator, - ParallelScanState as BaseParallelScanState, - ScanState as BaseScanState, -} from '@aws/dynamodb-query-iterator'; -import { - marshallKey, - Schema, - unmarshallItem, - ZeroArgumentsConstructor, -} from '@aws/dynamodb-data-marshaller'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -/** - * Iterates over each page of items returned by a parallel DynamoDB scan until - * no more pages are available. - */ -export class ParallelScanPaginator extends Paginator { - private readonly _ctor: ZeroArgumentsConstructor; - private readonly _paginator: BasePaginator; - private readonly _schema: Schema; - - constructor( - client: DynamoDB, - itemConstructor: ZeroArgumentsConstructor, - segments: number, - options: ParallelScanOptions & { tableNamePrefix?: string } = {} - ) { - const schema = getSchema(itemConstructor.prototype); - const input: ParallelScanInput = { - ...buildScanInput(itemConstructor, options), - TotalSegments: segments, - ExclusiveStartKey: undefined, - Segment: undefined - }; - - let scanState: BaseParallelScanState|undefined; - if (options.scanState) { - scanState = options.scanState.map( - ({initialized, lastEvaluatedKey: lastKey}) => ({ - initialized, - LastEvaluatedKey: lastKey - ? 
marshallKey(schema, lastKey, options.indexName) - : undefined - } as BaseScanState) - ); - } - - const paginator = new BasePaginator(client, input, scanState); - super(paginator, itemConstructor); - - this._paginator = paginator; - this._ctor = itemConstructor; - this._schema = schema; - } - - /** - * The `lastEvaluatedKey` attribute is not available on parallel scans. Use - * {@link scanState} instead. - */ - get lastEvaluatedKey() { - return undefined; - } - - /** - * A snapshot of the current state of a parallel scan. May be used to resume - * a parallel scan with a separate paginator. - */ - get scanState(): ParallelScanState { - return this._paginator.scanState.map( - ({initialized, LastEvaluatedKey}) => ({ - initialized, - lastEvaluatedKey: LastEvaluatedKey - ? unmarshallItem(this._schema, LastEvaluatedKey, this._ctor) - : undefined - } as ScanState) - ); - } -} diff --git a/packages/dynamodb-data-mapper/src/QueryIterator.ts b/packages/dynamodb-data-mapper/src/QueryIterator.ts deleted file mode 100644 index 0369828d..00000000 --- a/packages/dynamodb-data-mapper/src/QueryIterator.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { Iterator } from './Iterator'; -import { QueryOptions } from './namedParameters'; -import { QueryPaginator } from './QueryPaginator'; -import { ZeroArgumentsConstructor } from '@aws/dynamodb-data-marshaller'; -import { - ConditionExpression, - ConditionExpressionPredicate, -} from '@aws/dynamodb-expressions'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -/** - * Iterates over each item returned by a DynamoDB query until no more pages are - * available. - */ -export class QueryIterator extends Iterator> { - constructor( - client: DynamoDB, - valueConstructor: ZeroArgumentsConstructor, - keyCondition: ConditionExpression | - {[propertyName: string]: ConditionExpressionPredicate|any}, - options?: QueryOptions & {tableNamePrefix?: string} - ) { - super( - new QueryPaginator(client, valueConstructor, keyCondition, options) - ); - } -} diff --git a/packages/dynamodb-data-mapper/src/QueryPaginator.ts b/packages/dynamodb-data-mapper/src/QueryPaginator.ts deleted file mode 100644 index c08185a4..00000000 --- a/packages/dynamodb-data-mapper/src/QueryPaginator.ts +++ /dev/null @@ -1,130 +0,0 @@ -import { marshallStartKey } from './marshallStartKey'; -import { QueryOptions } from './namedParameters'; -import { Paginator } from './Paginator'; -import { getSchema, getTableName } from './protocols'; -import { QueryPaginator as BasePaginator } from '@aws/dynamodb-query-iterator'; -import { - marshallConditionExpression, - marshallProjectionExpression, - ZeroArgumentsConstructor, -} from '@aws/dynamodb-data-marshaller'; -import { - ConditionExpression, - ConditionExpressionPredicate, - ExpressionAttributes, - isConditionExpression, - isConditionExpressionPredicate, -} from '@aws/dynamodb-expressions'; -import { QueryInput } from 'aws-sdk/clients/dynamodb'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -/** - * Iterates over each page of items returned by a DynamoDB query until no more - * pages are available. 
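The `scanState` snapshot above allows a parallel scan to be paused and resumed elsewhere. A minimal sketch, again with a hypothetical `Post` class:

```typescript
import { DataMapper, DynamoDbSchema, DynamoDbTable } from '@aws/dynamodb-data-mapper';
import DynamoDB = require('aws-sdk/clients/dynamodb');

// Hypothetical domain class used only for illustration.
class Post {
    id?: string;
}

Object.defineProperties(Post.prototype, {
    [DynamoDbTable]: {value: 'Posts'},
    [DynamoDbSchema]: {
        value: {id: {type: 'String', keyType: 'HASH'}},
    },
});

const mapper = new DataMapper({client: new DynamoDB({region: 'us-west-2'})});

async function resumableParallelScan() {
    // Work page by page so the per-segment state can be captured.
    const paginator = mapper.parallelScan(Post, 4).pages();
    await paginator.next();
    const state = paginator.scanState;

    // A separate paginator (even in another process) can pick up from the
    // captured state rather than starting the scan over.
    for await (const post of mapper.parallelScan(Post, 4, {scanState: state})) {
        console.log(post.id);
    }
}
```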
- */ -export class QueryPaginator extends Paginator { - constructor( - client: DynamoDB, - valueConstructor: ZeroArgumentsConstructor, - keyCondition: ConditionExpression | - {[propertyName: string]: ConditionExpressionPredicate|any}, - options: QueryOptions & {tableNamePrefix?: string} = {} - ) { - const itemSchema = getSchema(valueConstructor.prototype); - - let { - filter, - indexName, - limit, - pageSize, - projection, - readConsistency, - scanIndexForward, - startKey, - tableNamePrefix: prefix, - } = options; - - const req: QueryInput = { - TableName: getTableName(valueConstructor.prototype, prefix), - ScanIndexForward: scanIndexForward, - Limit: pageSize, - IndexName: indexName, - }; - - if (readConsistency === 'strong') { - req.ConsistentRead = true; - } - - const attributes = new ExpressionAttributes(); - req.KeyConditionExpression = marshallConditionExpression( - normalizeKeyCondition(keyCondition), - itemSchema, - attributes - ).expression; - - if (filter) { - req.FilterExpression = marshallConditionExpression( - filter, - itemSchema, - attributes - ).expression; - } - - if (projection) { - req.ProjectionExpression = marshallProjectionExpression( - projection, - itemSchema, - attributes - ).expression; - } - - if (Object.keys(attributes.names).length > 0) { - req.ExpressionAttributeNames = attributes.names; - } - - if (Object.keys(attributes.values).length > 0) { - req.ExpressionAttributeValues = attributes.values; - } - - if (startKey) { - req.ExclusiveStartKey = marshallStartKey(itemSchema, startKey); - } - - super( - new BasePaginator(client, req, limit), - valueConstructor - ); - } -} - -function normalizeKeyCondition( - keyCondition: ConditionExpression | - {[key: string]: ConditionExpressionPredicate|any} -): ConditionExpression { - if (isConditionExpression(keyCondition)) { - return keyCondition; - } - - const conditions: Array = []; - for (const property of Object.keys(keyCondition)) { - const predicate = keyCondition[property]; - if (isConditionExpressionPredicate(predicate)) { - conditions.push({ - ...predicate, - subject: property, - }); - } else { - conditions.push({ - type: 'Equals', - subject: property, - object: predicate, - }); - } - } - - if (conditions.length === 1) { - return conditions[0]; - } - - return {type: 'And', conditions}; -} diff --git a/packages/dynamodb-data-mapper/src/ScanIterator.ts b/packages/dynamodb-data-mapper/src/ScanIterator.ts deleted file mode 100644 index 1edb4b28..00000000 --- a/packages/dynamodb-data-mapper/src/ScanIterator.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { Iterator } from './Iterator'; -import { SequentialScanOptions } from './namedParameters'; -import { ScanPaginator } from './ScanPaginator'; -import { ZeroArgumentsConstructor } from '@aws/dynamodb-data-marshaller'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -/** - * Iterates over each item returned by a DynamoDB scan until no more pages are - * available. 
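The key condition normalization above accepts either a full condition expression or a shorthand object whose values may be plain values or predicates. A sketch with a hypothetical composite-key `Post` class (the timestamps are arbitrary):

```typescript
import { DataMapper, DynamoDbSchema, DynamoDbTable } from '@aws/dynamodb-data-mapper';
import { between } from '@aws/dynamodb-expressions';
import DynamoDB = require('aws-sdk/clients/dynamodb');

// Hypothetical domain class with a hash and a range key.
class Post {
    authorId?: string;
    createdAt?: number;
}

Object.defineProperties(Post.prototype, {
    [DynamoDbTable]: {value: 'Posts'},
    [DynamoDbSchema]: {
        value: {
            authorId: {type: 'String', keyType: 'HASH'},
            createdAt: {type: 'Number', keyType: 'RANGE'},
        },
    },
});

const mapper = new DataMapper({client: new DynamoDB({region: 'us-west-2'})});

async function keyConditionSketch() {
    // Plain values become Equals conditions on the named properties.
    for await (const post of mapper.query(Post, {authorId: 'author-1'})) {
        console.log(post.createdAt);
    }

    // Predicates from @aws/dynamodb-expressions may be mixed in per property.
    const recent = mapper.query(Post, {
        authorId: 'author-1',
        createdAt: between(1609459200, 1612137600),
    });
    for await (const post of recent) {
        console.log(post.createdAt);
    }
}
```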
- */ -export class ScanIterator extends Iterator> { - constructor( - client: DynamoDB, - valueConstructor: ZeroArgumentsConstructor, - options?: SequentialScanOptions - ) { - super(new ScanPaginator(client, valueConstructor, options)); - } -} diff --git a/packages/dynamodb-data-mapper/src/ScanPaginator.ts b/packages/dynamodb-data-mapper/src/ScanPaginator.ts deleted file mode 100644 index 23f0c4a5..00000000 --- a/packages/dynamodb-data-mapper/src/ScanPaginator.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { buildScanInput } from './buildScanInput'; -import { SequentialScanOptions } from './namedParameters'; -import { Paginator } from './Paginator'; -import { ScanPaginator as BasePaginator } from '@aws/dynamodb-query-iterator'; -import { ZeroArgumentsConstructor } from '@aws/dynamodb-data-marshaller'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -/** - * Iterates over each page of items returned by a DynamoDB scan until no more - * pages are available. - */ -export class ScanPaginator extends Paginator { - constructor( - client: DynamoDB, - itemConstructor: ZeroArgumentsConstructor, - options: SequentialScanOptions = {} - ) { - super( - new BasePaginator(client, buildScanInput(itemConstructor, options), options.limit), - itemConstructor - ); - } -} diff --git a/packages/dynamodb-data-mapper/src/asyncIteratorSymbolPolyfill.ts b/packages/dynamodb-data-mapper/src/asyncIteratorSymbolPolyfill.ts deleted file mode 100644 index e5e1a694..00000000 --- a/packages/dynamodb-data-mapper/src/asyncIteratorSymbolPolyfill.ts +++ /dev/null @@ -1,8 +0,0 @@ -/** - * Provides a simple polyfill for runtime environments that provide a Symbol - * implementation but do not have Symbol.asyncIterator available by default. - */ - -if (Symbol && !Symbol.asyncIterator) { - (Symbol as any).asyncIterator = Symbol.for("__@@asyncIterator__"); -} diff --git a/packages/dynamodb-data-mapper/src/buildScanInput.ts b/packages/dynamodb-data-mapper/src/buildScanInput.ts deleted file mode 100644 index fb1ec660..00000000 --- a/packages/dynamodb-data-mapper/src/buildScanInput.ts +++ /dev/null @@ -1,76 +0,0 @@ -import { marshallStartKey } from './marshallStartKey'; -import { SequentialScanOptions } from './namedParameters'; -import { getSchema, getTableName } from './protocols'; -import { - marshallConditionExpression, - marshallProjectionExpression, - ZeroArgumentsConstructor, -} from '@aws/dynamodb-data-marshaller'; -import { ExpressionAttributes } from '@aws/dynamodb-expressions'; -import { ScanInput } from 'aws-sdk/clients/dynamodb'; - -/** - * @internal - */ -export function buildScanInput( - valueConstructor: ZeroArgumentsConstructor, - options: SequentialScanOptions = {} -): ScanInput { - const { - filter, - indexName, - pageSize, - projection, - readConsistency, - segment, - startKey, - tableNamePrefix: prefix, - totalSegments, - } = options; - - const req: ScanInput = { - TableName: getTableName(valueConstructor.prototype, prefix), - Limit: pageSize, - IndexName: indexName, - Segment: segment, - TotalSegments: totalSegments, - }; - - if (readConsistency === 'strong') { - req.ConsistentRead = true; - } - - const schema = getSchema(valueConstructor.prototype); - - const attributes = new ExpressionAttributes(); - - if (filter) { - req.FilterExpression = marshallConditionExpression( - filter, - schema, - attributes - ).expression; - } - - if (projection) { - req.ProjectionExpression = marshallProjectionExpression( - projection, - schema, - attributes - ).expression; - } - - if (Object.keys(attributes.names).length > 0) { 
- req.ExpressionAttributeNames = attributes.names; - } - - if (Object.keys(attributes.values).length > 0) { - req.ExpressionAttributeValues = attributes.values; - } - - if (startKey) { - req.ExclusiveStartKey = marshallStartKey(schema, startKey); - } - - return req; -} diff --git a/packages/dynamodb-data-mapper/src/constants.ts b/packages/dynamodb-data-mapper/src/constants.ts deleted file mode 100644 index bc0e8b3b..00000000 --- a/packages/dynamodb-data-mapper/src/constants.ts +++ /dev/null @@ -1,15 +0,0 @@ -export const VERSION = '0.4.0'; - -export const MAX_WRITE_BATCH_SIZE = 25; - -export const MAX_READ_BATCH_SIZE = 100; - -export type OnMissingStrategy = 'remove'|'skip'; - -export type ReadConsistency = 'eventual'|'strong'; - -export interface StringToAnyObjectMap {[key: string]: any;} - -export type SyncOrAsyncIterable = Iterable|AsyncIterable; - -export type WriteType = 'put'|'delete'; diff --git a/packages/dynamodb-data-mapper/src/embed.spec.ts b/packages/dynamodb-data-mapper/src/embed.spec.ts deleted file mode 100644 index 3f234e95..00000000 --- a/packages/dynamodb-data-mapper/src/embed.spec.ts +++ /dev/null @@ -1,35 +0,0 @@ -import {embed} from "./embed"; -import {DynamoDbSchema} from "./protocols"; - -describe('embed', () => { - const schema = {foo: {type: 'String'}}; - class Embeddable {} - Object.defineProperty(Embeddable.prototype, DynamoDbSchema as any, { - value: schema - }); - - it( - 'should return a SchemaType using the embedded schema of a document constructor', - () => { - const schemaType = embed(Embeddable); - - expect(schemaType.type).toBe('Document'); - expect(schemaType.members).toEqual(schema); - expect(schemaType.valueConstructor).toBe(Embeddable); - } - ); - - it('should pass through a defined attributeName', () => { - const attributeName = 'attributeName'; - const schemaType = embed(Embeddable, {attributeName}); - - expect(schemaType.attributeName).toBe(attributeName); - }); - - it('should pass through a defined defaultProvider', () => { - const defaultProvider = () => new Embeddable(); - const schemaType = embed(Embeddable, {defaultProvider}); - - expect(schemaType.defaultProvider).toBe(defaultProvider); - }); -}); diff --git a/packages/dynamodb-data-mapper/src/embed.ts b/packages/dynamodb-data-mapper/src/embed.ts deleted file mode 100644 index 199408ee..00000000 --- a/packages/dynamodb-data-mapper/src/embed.ts +++ /dev/null @@ -1,23 +0,0 @@ -import {DynamoDbSchema} from "./protocols"; -import { - DocumentType, - ZeroArgumentsConstructor, -} from '@aws/dynamodb-data-marshaller'; - -export interface DocumentTypeOptions { - defaultProvider?: () => T; - attributeName?: string; -} - -export function embed( - documentConstructor: ZeroArgumentsConstructor, - {attributeName, defaultProvider}: DocumentTypeOptions = {} -): DocumentType { - return { - type: 'Document', - members: (documentConstructor.prototype as any)[DynamoDbSchema] || {}, - attributeName, - defaultProvider, - valueConstructor: documentConstructor - }; -} diff --git a/packages/dynamodb-data-mapper/src/index.ts b/packages/dynamodb-data-mapper/src/index.ts deleted file mode 100644 index c9ed14e0..00000000 --- a/packages/dynamodb-data-mapper/src/index.ts +++ /dev/null @@ -1,12 +0,0 @@ -export * from './constants'; -export * from './DataMapper'; -export * from './embed'; -export * from './ItemNotFoundException'; -export * from './namedParameters'; -export * from './ParallelScanIterator'; -export * from './ParallelScanPaginator'; -export * from './protocols'; -export * from './QueryIterator'; -export * from 
'./QueryPaginator'; -export * from './ScanIterator'; -export * from './ScanPaginator'; diff --git a/packages/dynamodb-data-mapper/src/marshallStartKey.ts b/packages/dynamodb-data-mapper/src/marshallStartKey.ts deleted file mode 100644 index f34931dd..00000000 --- a/packages/dynamodb-data-mapper/src/marshallStartKey.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { marshallValue, Schema } from '@aws/dynamodb-data-marshaller'; -import { Key } from 'aws-sdk/clients/dynamodb'; - -/** - * @internal - */ -export function marshallStartKey( - schema: Schema, - startKey: {[key: string]: any} -): Key { - const key: Key = {}; - for (const propertyName of Object.keys(startKey)) { - const propSchema = schema[propertyName]; - const { attributeName = propertyName } = propSchema; - if (propSchema) { - key[attributeName] = marshallValue( - propSchema, - startKey[propertyName] - )!; - } - } - - return key; -} diff --git a/packages/dynamodb-data-mapper/src/namedParameters/BatchGetOptions.ts b/packages/dynamodb-data-mapper/src/namedParameters/BatchGetOptions.ts deleted file mode 100644 index 46ec1790..00000000 --- a/packages/dynamodb-data-mapper/src/namedParameters/BatchGetOptions.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { GetOptions } from './GetOptions'; -import { ReadConsistencyConfiguration } from './ReadConsistencyConfiguration'; -import { Schema } from "@aws/dynamodb-data-marshaller"; - -export interface BatchGetOptions extends ReadConsistencyConfiguration { - /** - * Options to apply to specific tables when performing a batch get operation - * that reads from multiple tables. - */ - perTableOptions?: { - [tableName: string]: BatchGetTableOptions; - }; -} - -export interface BatchGetTableOptions extends GetOptions { - /** - * The schema to use when mapping the supplied `projection` option to the - * attribute names used in DynamoDB. - * - * This parameter is only necessary if a batch contains items from multiple - * classes that map to the *same* table using *different* property names to - * represent the same DynamoDB attributes. - * - * If not supplied, the schema associated with the first item associated - * with a given table will be used in its place. - */ - projectionSchema?: Schema; -} diff --git a/packages/dynamodb-data-mapper/src/namedParameters/CreateTableOptions.ts b/packages/dynamodb-data-mapper/src/namedParameters/CreateTableOptions.ts deleted file mode 100644 index d174090d..00000000 --- a/packages/dynamodb-data-mapper/src/namedParameters/CreateTableOptions.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { ProvisionedThroughput } from './ProvisionedThroughput'; -import { PerIndexOptions } from './SecondaryIndexOptions'; - -interface BaseCreateTableOptions { - streamViewType?: StreamViewType; - indexOptions?: PerIndexOptions; - billingMode?: BillingMode; - sseSpecification?: SseSpecification; -} - -export interface SseSpecification { - sseType: SseType; - kmsMasterKeyId?: string; -} - -export interface ProvisionedCreateTableOptions extends ProvisionedThroughput, BaseCreateTableOptions { - billingMode?: 'PROVISIONED'; -} - -export interface OnDemandCreateTableOptions extends BaseCreateTableOptions { - billingMode: 'PAY_PER_REQUEST'; -} - -export type CreateTableOptions = ProvisionedCreateTableOptions | OnDemandCreateTableOptions; - -export type BillingMode = 'PROVISIONED' | 'PAY_PER_REQUEST'; - -/** - * Server-side encryption type: - * AES256 - Server-side encryption which uses the AES256 algorithm (not applicable). - * KMS - Server-side encryption which uses AWS Key Management Service. 
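The table-creation options modeled above can be combined roughly as sketched below. The index name, projection, and billing mode are illustrative; the index key itself is assumed to be declared on the schema via `indexKeyConfigurations`.

```typescript
import { DataMapper, DynamoDbSchema, DynamoDbTable } from '@aws/dynamodb-data-mapper';
import DynamoDB = require('aws-sdk/clients/dynamodb');

// Hypothetical domain class with one global secondary index key.
class Post {
    id?: string;
    author?: string;
    title?: string;
}

Object.defineProperties(Post.prototype, {
    [DynamoDbTable]: {value: 'Posts'},
    [DynamoDbSchema]: {
        value: {
            id: {type: 'String', keyType: 'HASH'},
            author: {
                type: 'String',
                indexKeyConfigurations: {AuthorIndex: 'HASH'},
            },
            title: {type: 'String'},
        },
    },
});

const mapper = new DataMapper({client: new DynamoDB({region: 'us-west-2'})});

async function createTableSketch() {
    // On-demand billing; projection options must be supplied for every index
    // declared in the schema.
    await mapper.ensureTableExists(Post, {
        billingMode: 'PAY_PER_REQUEST',
        indexOptions: {
            AuthorIndex: {type: 'global', projection: 'all'},
        },
    });
}
```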
- */ -export type SseType = 'AES256' | 'KMS'; - -export type StreamViewType = - 'NEW_IMAGE' | - 'OLD_IMAGE' | - 'NEW_AND_OLD_IMAGES' | - 'KEYS_ONLY' | - 'NONE'; diff --git a/packages/dynamodb-data-mapper/src/namedParameters/DataMapperConfiguration.ts b/packages/dynamodb-data-mapper/src/namedParameters/DataMapperConfiguration.ts deleted file mode 100644 index 933d40b7..00000000 --- a/packages/dynamodb-data-mapper/src/namedParameters/DataMapperConfiguration.ts +++ /dev/null @@ -1,27 +0,0 @@ -import DynamoDB = require("aws-sdk/clients/dynamodb"); -import { ReadConsistency } from '../constants'; - -export interface DataMapperConfiguration { - /** - * The low-level DynamoDB client to use to execute API operations. - */ - client: DynamoDB; - - /** - * The default read consistency to use when loading items. If not specified, - * 'eventual' will be used. - */ - readConsistency?: ReadConsistency; - - /** - * Whether operations should NOT by default honor the version attribute - * specified in the schema by incrementing the attribute and preventing the - * operation from taking effect if the local version is out of date. - */ - skipVersionCheck?: boolean; - - /** - * A prefix to apply to all table names. - */ - tableNamePrefix?: string; -} diff --git a/packages/dynamodb-data-mapper/src/namedParameters/DeleteOptions.ts b/packages/dynamodb-data-mapper/src/namedParameters/DeleteOptions.ts deleted file mode 100644 index 3a4c8c10..00000000 --- a/packages/dynamodb-data-mapper/src/namedParameters/DeleteOptions.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { StringToAnyObjectMap } from '../constants'; -import { ConditionExpression } from '@aws/dynamodb-expressions'; - -export interface DeleteOptions { - /** - * A condition on which this delete operation's completion will be - * predicated. - */ - condition?: ConditionExpression; - - /** - * The values to return from this operation. - */ - returnValues?: 'ALL_OLD'|'NONE'; - - /** - * Whether this operation should NOT honor the version attribute specified - * in the schema by incrementing the attribute and preventing the operation - * from taking effect if the local version is out of date. - */ - skipVersionCheck?: boolean; -} - -/** - * @deprecated - */ -export interface DeleteParameters< - T extends StringToAnyObjectMap = StringToAnyObjectMap -> extends DeleteOptions { - /** - * The item being deleted. - */ - item: T; -} diff --git a/packages/dynamodb-data-mapper/src/namedParameters/ExecuteUpdateExpressionOptions.ts b/packages/dynamodb-data-mapper/src/namedParameters/ExecuteUpdateExpressionOptions.ts deleted file mode 100644 index 3f299b2c..00000000 --- a/packages/dynamodb-data-mapper/src/namedParameters/ExecuteUpdateExpressionOptions.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { ConditionExpression } from '@aws/dynamodb-expressions'; - -export interface ExecuteUpdateExpressionOptions { - /** - * A condition on which this update operation's completion will be - * predicated. 
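A short sketch of constructing the mapper with the configuration options above; every option besides `client` is optional, and the values shown are illustrative.

```typescript
import { DataMapper } from '@aws/dynamodb-data-mapper';
import DynamoDB = require('aws-sdk/clients/dynamodb');

const mapper = new DataMapper({
    client: new DynamoDB({region: 'us-west-2'}),
    // Prepended to every table name resolved from DynamoDbTable,
    // e.g. 'Posts' becomes 'dev_Posts'.
    tableNamePrefix: 'dev_',
    // Default read consistency for get, query, and scan; 'eventual' otherwise.
    readConsistency: 'strong',
    // When true, version attributes are neither incremented nor checked
    // unless an individual call overrides this.
    skipVersionCheck: true,
});
```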
- */ - condition?: ConditionExpression; -} diff --git a/packages/dynamodb-data-mapper/src/namedParameters/GetOptions.ts b/packages/dynamodb-data-mapper/src/namedParameters/GetOptions.ts deleted file mode 100644 index 57c6590c..00000000 --- a/packages/dynamodb-data-mapper/src/namedParameters/GetOptions.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { ReadConsistencyConfiguration } from './ReadConsistencyConfiguration'; -import { StringToAnyObjectMap } from '../constants'; -import { ProjectionExpression } from "@aws/dynamodb-expressions"; - -export interface GetOptions extends ReadConsistencyConfiguration { - /** - * The item attributes to get. - */ - projection?: ProjectionExpression; -} - -/** - * @deprecated - */ -export interface GetParameters< - T extends StringToAnyObjectMap = StringToAnyObjectMap -> extends GetOptions { - /** - * The item being loaded. - */ - item: T; -} diff --git a/packages/dynamodb-data-mapper/src/namedParameters/ProvisionedThroughput.ts b/packages/dynamodb-data-mapper/src/namedParameters/ProvisionedThroughput.ts deleted file mode 100644 index aac5be1d..00000000 --- a/packages/dynamodb-data-mapper/src/namedParameters/ProvisionedThroughput.ts +++ /dev/null @@ -1,4 +0,0 @@ -export interface ProvisionedThroughput { - readCapacityUnits?: number; - writeCapacityUnits?: number; -} diff --git a/packages/dynamodb-data-mapper/src/namedParameters/PutOptions.ts b/packages/dynamodb-data-mapper/src/namedParameters/PutOptions.ts deleted file mode 100644 index b94f0fe5..00000000 --- a/packages/dynamodb-data-mapper/src/namedParameters/PutOptions.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { StringToAnyObjectMap } from '../constants'; -import { ConditionExpression } from '@aws/dynamodb-expressions'; - -export interface PutOptions { - /** - * A condition on whose evaluation this put operation's completion will be - * predicated. - */ - condition?: ConditionExpression; - - /** - * Whether this operation should NOT honor the version attribute specified - * in the schema by incrementing the attribute and preventing the operation - * from taking effect if the local version is out of date. - */ - skipVersionCheck?: boolean; -} - -/** - * @deprecated - */ -export interface PutParameters< - T extends StringToAnyObjectMap = StringToAnyObjectMap -> extends PutOptions { - /** - * The object to be saved. - */ - item: T; -} diff --git a/packages/dynamodb-data-mapper/src/namedParameters/QueryOptions.ts b/packages/dynamodb-data-mapper/src/namedParameters/QueryOptions.ts deleted file mode 100644 index 946c1a36..00000000 --- a/packages/dynamodb-data-mapper/src/namedParameters/QueryOptions.ts +++ /dev/null @@ -1,81 +0,0 @@ -import { ReadConsistencyConfiguration } from './ReadConsistencyConfiguration'; -import { StringToAnyObjectMap } from '../constants'; -import { ZeroArgumentsConstructor } from '@aws/dynamodb-data-marshaller'; -import { - ConditionExpression, - ConditionExpressionPredicate, - ProjectionExpression, -} from '@aws/dynamodb-expressions'; - -export interface QueryOptions extends ReadConsistencyConfiguration { - /** - * A condition expression that DynamoDB applies after the Query operation, - * but before the data is returned to you. Items that do not satisfy the - * FilterExpression criteria are not returned. - * - * A FilterExpression does not allow key attributes. You cannot define a - * filter expression based on a partition key or a sort key. - */ - filter?: ConditionExpression; - - /** - * The name of an index to query. 
This index can be any local secondary - * index or global secondary index on the table. - */ - indexName?: string; - - /** - * The maximum number of items to fetch over all pages of the query. - */ - limit?: number; - - /** - * The maximum number of items to fetch per page of results. - */ - pageSize?: number; - - /** - * The item attributes to get. - */ - projection?: ProjectionExpression; - - /** - * Specifies the order for index traversal: If true, the traversal is - * performed in ascending order; if false, the traversal is performed in - * descending order. - * - * Items with the same partition key value are stored in sorted order by - * sort key. If the sort key data type is Number, the results are stored in - * numeric order. For type String, the results are stored in order of ASCII - * character code values. For type Binary, DynamoDB treats each byte of the - * binary data as unsigned. - */ - scanIndexForward?: boolean; - - /** - * The primary key of the first item that this operation will evaluate. When - * querying an index, only the `lastEvaluatedKey` derived from a previous - * query operation on the same index should be supplied for this parameter. - */ - startKey?: {[key: string]: any}; -} - -/** - * @deprecated - */ -export interface QueryParameters< - T extends StringToAnyObjectMap = StringToAnyObjectMap -> extends QueryOptions { - /** - * The condition that specifies the key value(s) for items to be retrieved - * by the Query action. - */ - keyCondition: ConditionExpression | - {[propertyName: string]: ConditionExpressionPredicate|any}; - - /** - * A constructor that creates objects representing one record returned by - * the query operation. - */ - valueConstructor: ZeroArgumentsConstructor; -} diff --git a/packages/dynamodb-data-mapper/src/namedParameters/ReadConsistencyConfiguration.ts b/packages/dynamodb-data-mapper/src/namedParameters/ReadConsistencyConfiguration.ts deleted file mode 100644 index 129972d5..00000000 --- a/packages/dynamodb-data-mapper/src/namedParameters/ReadConsistencyConfiguration.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { ReadConsistency } from '../constants'; - -export interface ReadConsistencyConfiguration { - /** - * The read consistency to require when reading from DynamoDB. - */ - readConsistency?: ReadConsistency; -} diff --git a/packages/dynamodb-data-mapper/src/namedParameters/ScanOptions.ts b/packages/dynamodb-data-mapper/src/namedParameters/ScanOptions.ts deleted file mode 100644 index 9dc6108f..00000000 --- a/packages/dynamodb-data-mapper/src/namedParameters/ScanOptions.ts +++ /dev/null @@ -1,169 +0,0 @@ -import { ReadConsistencyConfiguration } from './ReadConsistencyConfiguration'; -import { StringToAnyObjectMap } from '../constants'; -import { ZeroArgumentsConstructor } from '@aws/dynamodb-data-marshaller'; -import { - ConditionExpression, - ProjectionExpression, -} from '@aws/dynamodb-expressions'; - -export interface BaseScanOptions extends ReadConsistencyConfiguration { - /** - * A string that contains conditions that DynamoDB applies after the Query - * operation, but before the data is returned to you. Items that do not - * satisfy the FilterExpression criteria are not returned. - * - * A FilterExpression does not allow key attributes. You cannot define a - * filter expression based on a partition key or a sort key. - */ - filter?: ConditionExpression; - - /** - * The name of an index to query. This index can be any local secondary - * index or global secondary index on the table. 
- */ - indexName?: string; - - /** - * The maximum number of items to fetch per page of results. - */ - pageSize?: number; - - /** - * The item attributes to get. - */ - projection?: ProjectionExpression; -} - -export interface CtorBearer { - /** - * A constructor that creates objects representing one record returned by - * the query operation. - */ - valueConstructor: ZeroArgumentsConstructor; -} - -export interface BaseSequentialScanOptions extends BaseScanOptions { - /** - * The maximum number of items to fetch over all pages of scan. - */ - limit?: number; - - /** - * For a parallel Scan request, Segment identifies an individual segment to - * be scanned by an application worker. - * - * Segment IDs are zero-based, so the first segment is always 0. For - * example, if you want to use four application threads to scan a table or - * an index, then the first thread specifies a Segment value of 0, the - * second thread specifies 1, and so on. - */ - segment?: number; - - /** - * The primary key of the first item that this operation will evaluate. When - * scanning an index, only the `lastEvaluatedKey` derived from a previous - * scan operation on the same index should be supplied for this parameter. - */ - startKey?: {[key: string]: any}; - - /** - * The number of application workers that will perform the scan. - * - * Must be an integer between 1 and 1,000,000 - */ - totalSegments?: number; -} - -export interface ScanOptions extends BaseSequentialScanOptions { - segment?: undefined; - totalSegments?: undefined; -} - -/** - * Pagination state for a scan segment for which the first page has not yet been - * retrieved. - */ -export interface UninitializedScanState { - initialized: false; - lastEvaluatedKey?: undefined; -} - -/** - * Pagination state for a scan segment for which one or more pages have been - * retrieved. If `lastEvaluatedKey` is defined, there are more pages to fetch; - * otherwise, all pages for this segment have been returned. - */ -export interface InitializedScanState { - initialized: true; - lastEvaluatedKey?: {[attributeName: string]: any}; -} - -export type ScanState = UninitializedScanState|InitializedScanState; - -/** - * ParallelScanState is represented as an array whose length is equal to the - * number of segments being scanned independently, with each segment's state - * being stored at the array index corresponding to its segment number. - * - * Segment state is represented with a tagged union with the following keys: - * - `initialized` -- whether the first page of results has been retrieved - * - `lastEvaluatedKey` -- the key to provide (if any) when requesting the - * next page of results. - * - * If `lastEvaluatedKey` is undefined and `initialized` is true, then all pages - * for the given segment have been returned. - */ -export type ParallelScanState = Array; - -export interface ParallelScanOptions extends BaseScanOptions { - /** - * The segment identifier must not be supplied when initiating a parallel - * scan. This identifier will be created for each worker on your behalf. - */ - segment?: undefined; - - /** - * The point from which a parallel scan should resume. 
- */ - scanState?: ParallelScanState; -} - -/** - * @deprecated - */ -export type ScanParameters< - T extends StringToAnyObjectMap = StringToAnyObjectMap -> = ScanOptions & CtorBearer; - -export interface ParallelScanWorkerOptions extends BaseSequentialScanOptions { - segment: number; - totalSegments: number; -} - -/** - * @deprecated - */ -export type ParallelScanWorkerParameters< - T extends StringToAnyObjectMap = StringToAnyObjectMap -> = ParallelScanWorkerOptions & CtorBearer; - -/** - * @deprecated - */ -export type ParallelScanParameters< - T extends StringToAnyObjectMap = StringToAnyObjectMap -> = BaseScanOptions & CtorBearer & { - /** - * The number of application workers that will perform the scan. - * - * Must be an integer between 1 and 1,000,000 - */ - segments: number; -}; - -/** - * @internal - */ -export type SequentialScanOptions = (ScanOptions|ParallelScanWorkerOptions) & {tableNamePrefix?: string}; - - diff --git a/packages/dynamodb-data-mapper/src/namedParameters/SecondaryIndexOptions.ts b/packages/dynamodb-data-mapper/src/namedParameters/SecondaryIndexOptions.ts deleted file mode 100644 index 27c479a2..00000000 --- a/packages/dynamodb-data-mapper/src/namedParameters/SecondaryIndexOptions.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { ProvisionedThroughput } from './ProvisionedThroughput'; - -export type SecondaryIndexProjection = 'all'|'keys'|Array; - -export interface SharedSecondaryIndexOptions { - projection: SecondaryIndexProjection; -} - -export interface GlobalSecondaryIndexOptions extends - SharedSecondaryIndexOptions, - ProvisionedThroughput -{ - type: 'global'; -} - -export interface LocalSecondaryIndexOptions extends - SharedSecondaryIndexOptions -{ - type: 'local'; -} - -export type SecondaryIndexOptions - = GlobalSecondaryIndexOptions | LocalSecondaryIndexOptions; - -export interface PerIndexOptions { - [indexName: string]: SecondaryIndexOptions; -} diff --git a/packages/dynamodb-data-mapper/src/namedParameters/UpdateOptions.ts b/packages/dynamodb-data-mapper/src/namedParameters/UpdateOptions.ts deleted file mode 100644 index c582dde8..00000000 --- a/packages/dynamodb-data-mapper/src/namedParameters/UpdateOptions.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { OnMissingStrategy, StringToAnyObjectMap } from '../constants'; -import { ConditionExpression } from '@aws/dynamodb-expressions'; - -export interface UpdateOptions { - /** - * A condition on whose evaluation this update operation's completion will - * be predicated. - */ - condition?: ConditionExpression; - - /** - * Whether the absence of a value defined in the schema should be treated as - * a directive to remove the property from the item. - */ - onMissing?: OnMissingStrategy; - - /** - * Whether this operation should NOT honor the version attribute specified - * in the schema by incrementing the attribute and preventing the operation - * from taking effect if the local version is out of date. - */ - skipVersionCheck?: boolean; -} - -export interface UpdateParameters< - T extends StringToAnyObjectMap = StringToAnyObjectMap -> extends UpdateOptions { - /** - * The object to be saved. 
- */ - item: T; -} diff --git a/packages/dynamodb-data-mapper/src/namedParameters/index.ts b/packages/dynamodb-data-mapper/src/namedParameters/index.ts deleted file mode 100644 index f9c1cc05..00000000 --- a/packages/dynamodb-data-mapper/src/namedParameters/index.ts +++ /dev/null @@ -1,12 +0,0 @@ -export * from './BatchGetOptions'; -export * from './CreateTableOptions'; -export * from './DataMapperConfiguration'; -export * from './DeleteOptions'; -export * from './ExecuteUpdateExpressionOptions'; -export * from './GetOptions'; -export * from './ProvisionedThroughput'; -export * from './PutOptions'; -export * from './QueryOptions'; -export * from './ScanOptions'; -export * from './SecondaryIndexOptions'; -export * from './UpdateOptions'; diff --git a/packages/dynamodb-data-mapper/src/protocols.spec.ts b/packages/dynamodb-data-mapper/src/protocols.spec.ts deleted file mode 100644 index aced41bf..00000000 --- a/packages/dynamodb-data-mapper/src/protocols.spec.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { - DynamoDbSchema, - DynamoDbTable, - getSchema, - getTableName, -} from './protocols'; - -describe('getSchema', () => { - it('should return the schema bound at the DynamoDbSchema symbol', () => { - const schema = {}; - expect(getSchema({[DynamoDbSchema]: schema})).toBe(schema); - }); - - it('should throw if the provided object does not have a schema', () => { - expect(() => getSchema({})).toThrow(); - }); -}); - -describe('getTableName', () => { - it('should return the name bound at the DynamoDbTable symbol', () => { - expect(getTableName({[DynamoDbTable]: 'foo'})).toBe('foo'); - }); - - it('should throw if the provided object does not have a table name', () => { - expect(() => getTableName({})).toThrow(); - }); -}); diff --git a/packages/dynamodb-data-mapper/src/protocols.ts b/packages/dynamodb-data-mapper/src/protocols.ts deleted file mode 100644 index 0fbc184f..00000000 --- a/packages/dynamodb-data-mapper/src/protocols.ts +++ /dev/null @@ -1,111 +0,0 @@ -import {Schema} from '@aws/dynamodb-data-marshaller'; - -/** - * Table metadata is reported by items submitted to the data mapper via methods - * identified by symbols. This is done both to disambiguate data (which should - * always be identified by string keys) from metadata and also to allow an - * eventually integration with the First-Class Protocols proposal as described - * at {@link https://github.com/michaelficarra/proposal-first-class-protocols} - * (currently at stage 1 in the ECMAScript change acceptance process). - * - * Because the protocol proposal allows implementation to be declared - * dynamically at runtime (and also because TypeScript does not allow - * user-defined symbols to appear in type declarations), protocol adherence - * should be detected on objects at runtime rather than on types via static - * analysis. - */ - -/** - * Used to designate the mapping of an object from its JavaScript form to its - * representation in a DynamoDB Table or nested map. - * - * @example - * - * class FooDocument { - * [DynamoDbSchema]() { - * return { - * bar: {type: 'String'}, - * baz: {type: 'Number'}, - * }; - * } - * } - */ -export const DynamoDbSchema = Symbol('DynamoDbSchema'); - -export function getSchema(item: any): Schema { - if (item) { - const schema = item[DynamoDbSchema]; - if (schema && typeof schema === 'object') { - return schema; - } - } - - throw new Error( - 'The provided item did not adhere to the DynamoDbDocument protocol.' 
+ - ' No object property was found at the `DynamoDbSchema` symbol' - ); -} - -/** - * Used to designate that an object represents a row of the named DynamoDB - * table. Meant to be used in conjunction with {DynamoDbSchema}. - * - * @example - * - * class FooDocument { - * [DynamoDbTable]() { - * return 'FooTable'; - * } - * - * [DynamoDbSchema]() { - * return { - * bar: {type: 'String'}, - * baz: {type: 'Number'}, - * }; - * } - * } - */ -export const DynamoDbTable = Symbol('DynamoDbTableName'); - -export function getTableName(item: any, tableNamePrefix: string = ''): string { - if (item) { - const tableName = item[DynamoDbTable]; - if (typeof tableName === 'string') { - return tableNamePrefix + tableName; - } - } - - throw new Error( - 'The provided item did not adhere to the DynamoDbTable protocol. No' + - ' string property was found at the `DynamoDbTable` symbol' - ); -} - -/** - * Used to designate which fields on an object have been changed. The method - * identified by this symbol should return a iterable that enumerates the fields - * that have been altered. - * - * @example - * - * class FooDocument { - * constructor() { - * this._dirtyFields = new Set(); - * this._foo = ''; - * } - * - * get foo() { - * return this._foo; - * } - * - * set foo(value) { - * this._foo = value; - * this._dirtyFields.add('foo'); - * } - * - * [DynamoDbDirtyFields]() { - * return this._dirtyFields.values(); - * } - * } - */ -export const DynamoDbDirtyFields = Symbol('DynamoDbDirtyFields'); diff --git a/packages/dynamodb-data-mapper/tsconfig.json b/packages/dynamodb-data-mapper/tsconfig.json deleted file mode 100644 index 7683c068..00000000 --- a/packages/dynamodb-data-mapper/tsconfig.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "compilerOptions": { - "target": "es5", - "lib": [ - "es5", - "es2015.iterable", - "es2015.promise", - "es2015.collection", - "es2015.symbol.wellknown", - "esnext.asynciterable" - ], - "downlevelIteration": true, - "importHelpers": true, - "module": "commonjs", - "noUnusedLocals": true, - "strict": true, - "declaration": true, - "sourceMap": true, - "rootDir": "./src", - "outDir": "./build" - }, - "typedocOptions": { - "mode": "file", - "out": "../../docs/packages/dynamodb-data-mapper", - "excludeNotExported": true, - "excludePrivate": true, - "hideGenerator": true - } -} diff --git a/packages/dynamodb-data-mapper/tsconfig.test.json b/packages/dynamodb-data-mapper/tsconfig.test.json deleted file mode 100644 index 57f7d5b1..00000000 --- a/packages/dynamodb-data-mapper/tsconfig.test.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "sourceMap": false, - "inlineSourceMap": true, - "inlineSources": true, - "rootDir": "./src", - "outDir": "./build" - } -} diff --git a/packages/dynamodb-data-marshaller/.npmignore b/packages/dynamodb-data-marshaller/.npmignore deleted file mode 100644 index 1d116ecc..00000000 --- a/packages/dynamodb-data-marshaller/.npmignore +++ /dev/null @@ -1,10 +0,0 @@ -/src -/node_modules -/coverage - -*.spec.d.ts -*.spec.js -*.spec.js.map - -tsconfig.json -tsconfig.test.json diff --git a/packages/dynamodb-data-marshaller/LICENSE b/packages/dynamodb-data-marshaller/LICENSE deleted file mode 100644 index da05f5c9..00000000 --- a/packages/dynamodb-data-marshaller/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. 
- - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2017 Amazon.com, Inc. or its affiliates - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/packages/dynamodb-data-marshaller/README.md b/packages/dynamodb-data-marshaller/README.md deleted file mode 100644 index b30e6e7a..00000000 --- a/packages/dynamodb-data-marshaller/README.md +++ /dev/null @@ -1,419 +0,0 @@ -# Amazon DynamoDB Data Marshaller - -[![Apache 2 License](https://img.shields.io/github/license/awslabs/dynamodb-data-mapper-js.svg?style=flat)](http://aws.amazon.com/apache-2-0/) - -This library provides an `marshallItem` and `unmarshallItem` functions that -convert native JavaScript values to DynamoDB AttributeValues and back again, -respectively, based on a defined schema. While many JavaScript values map -cleanly to DynamoDB data types and vice versa, schemas allow you to losslessly -persist any JavaScript type, including dates, class instances, and empty -strings. - -## Getting started - -To use the data marshaller, begin by defining a schema that describes the -relationship between your application's domain objects and their serialized form -in a DynamoDB table: - -```javascript -const schema = { - foo: {type: 'Binary'}, - bar: {type: 'Boolean'}, - baz: {type: 'String'}, - quux: { - type: 'Document', - members: { - fizz: {type: 'Set', memberType: 'String'}, - buzz: { - type: 'Tuple', - members: [ - { - type: 'List', - memberType: {type: 'Set', memberType: 'Number'}, - }, - { - type: 'Map', - memberType: {type: 'Date'}, - } - ] - }, - }, - }, -}; -``` - -This schema may be used to marshall JavaScript values to DynamoDB attribute -values: - -```javascript -import {marshallItem} from '@aws/dynamodb-data-marshaller'; - -const marshalled = marshallItem(schema, { - foo: Uint8Array.from([0xde, 0xad, 0xbe, 0xef]), - bar: false, - baz: '', - quux: { - fizz: new Set(['a', 'b', 'c']), - buzz: [ - [ - new Set([1, 2, 3]), - new Set([2, 3, 4]), - new Set([3, 4, 5]), - ], - new Map([ - ['now', new Date()], - ['then', new Date(0)], - ]), - ] - } -}); -``` - -The schema can also be used to unmarshall DynamoDB attribute values back to -their original JavaScript representation: - -```javascript -import {unmarshallItem} from '@aws/dynamodb-data-marshaller'; - -const unmarshalled = unmarshallItem(schema, { - foo: {B: Uint8Array.from([0xde, 0xad, 0xbe, 0xef])}, - bar: {BOOL: false}, - baz: {NULL: true}, - quux: { - fizz: {SS: ['a', 'b', 'c']}, - buzz: { - L: [ - L: [ - {NS: ['1', '2', '3']}, - {NS: ['2', '3', '4']}, - {NS: ['3', '4', '5']}, - ], - M: { - now: {N: '1507189047'}, - then: {N: '0'} - }, - ], - }, - }, -}); -``` - -## Specifying keys - -DynamoDB tables must define a hash key and may optionally define a range key. In -DynamoDB documentation, these keys are sometimes referred to as *partition* and -*sort* keys, respectively. To declare a property to be a key, add a `keyType` -property to its property schema (example taken from the [DynamoDB developer -guide](http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/GSI.html)): - -```javascript -// Table model taken from http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/GSI.html -const gameScores = { - UserId: { - type: 'String', - keyType: 'HASH' - }, - GameTitle: { - type: 'String', - keyType: 'RANGE' - }, - TopScore: {type: 'Number'}, - TopScoreDateTime: {type: 'Date'}, - Wins: {type: 'Number'}, - Losses: {type: 'Number'} -}; -``` - -The `keyType` attribute may only be used in types that are serialized as -strings, numbers, or binary attributes. In addition to `'String'`, `'Number'`, -and `'Binary'` properties, it may be used on `'Date'` and `'Custom'` properties. 
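For instance, a schema might pair a `'Custom'` hash key with a `'Date'` range key. The sketch below uses illustrative attribute names; the `attributeType` on the custom key is only needed when the schema is also used to create or update a table:

```javascript
// Illustrative only: a 'Custom' property as the hash key and a 'Date'
// property as the range key. Both serialize to scalar attribute values,
// which is what makes them eligible to act as keys.
const auditEvents = {
    accountId: {
        type: 'Custom',
        attributeType: 'S',
        keyType: 'HASH',
        marshall: input => ({S: String(input)}),
        unmarshall: persistedValue => persistedValue.S,
    },
    occurredAt: {
        type: 'Date',
        keyType: 'RANGE',
    },
    detail: {type: 'String'},
};
```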
- -Index keys are specified using an object mapping index names to the key type as -which the value is used in a given index. To continue with the `gameScores` -example given above, you could add the index key declarations described in [the -DynamoDB Global Secondary Index developer guide](http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/GSI.html) -as follows: - -```javascript -const gameScores = { - UserId: { - type: 'String', - keyType: 'HASH' - }, - GameTitle: { - type: 'String', - keyType: 'RANGE', - indexKeyConfigurations: { - GameTitleIndex: 'HASH' - } - }, - TopScore: { - type: 'Number', - indexKeyConfigurations: { - GameTitleIndex: 'RANGE' - } - }, - TopScoreDateTime: {type: 'Date'}, - Wins: {type: 'Number'}, - Losses: {type: 'Number'} -}; -``` - -## Supplying defaults - -Any property schema may define a `defaultProvider` function to be called when a -field is `undefined` in the input provided to `marshallItem`. This function must -return a raw JavaScript value and should not return an already-marshalled -DynamoDB AttributeValue shape. - -```javascript -const uuidV4 = require('uuid/v4'); - -const schema = { - key: { - type: 'String', - defaultProvider: uuidV4, - keyType: 'HASH', - }, - // ... -}; -``` - -## Supported types - -### Any - -Will be marshalled and unmarshalled using the `@aws/dynamodb-auto-marshaller` -package, which detects the type of a given value at runtime. - -#### Example - -```javascript -const anyProperty = { - type: 'Any', - // optionally, you may specify configuration options for the - // @aws/dynamodb-auto-marshaller package's Marshaller class: - unwrapNumbers: false, - onInvalid: 'omit', - onEmpty: 'nullify', -}; -``` - -### Binary - -Used for `ArrayBuffer` and `ArrayBufferView` objects, as well as Node.JS -buffers. - -**May be used as a table or index key.** - -#### Example - -```javascript -const binaryProperty = {type: 'Binary'}; -``` - -### Boolean - -Used for `true`/`false` values. - -#### Example - -```javascript -const booleanProperty = {type: 'Boolean'}; -``` - -### Collection - -Denotes a list of untyped items. The constituent items will be marshalled and -unmarshalled using the `@aws/dynamodb-auto-marshaller`. - -#### Example - -```javascript -const collectionProperty = { - type: 'Collection', - // optionally, you may specify configuration options for the - // @aws/dynamodb-auto-marshaller package's Marshaller class: - unwrapNumbers: false, - onInvalid: 'omit', - onEmpty: 'nullify', -}; -``` - -### Custom - -Allows the use of bespoke marshalling and unmarshalling functions. The type -definition for a `'Custom'` property must include a `marshall` function that -converts the type's JavaScript representation to a DynamoDB AttributeValue and -an `unmarshall` function that converts the AttributeValue back to a JavaScript -value. - -**May be used as a table or index key.** - -#### Example - -```javascript -// This custom property handles strings -const customProperty = { - type: 'Custom', - marshall(input) { - return {S: input}; - }, - unmarshall(persistedValue) { - return persistedValue.S; - } -}; -``` - -### Date - -Used for time data. Dates will be serialized to DynamoDB as epoch timestamps -for easy integration with DynamoDB's time-to-live feature. As a result, timezone -information will not be persisted. - -**May be used as a table or index key.** - -#### Example - -```javascript -const dateProperty = {type: 'Date'}; -``` - -### Document - -Used for object values that have their own schema and (optionally) constructor. 
- -#### Example - -```javascript -class MyCustomDocument { - method() { - // pass - } - - get computedProperty() { - // pass - } -} - -class documentSchema = { - fizz: {type: 'String'}, - buzz: {type: 'Number'}, - pop: {type: 'Date'} -} - -const documentProperty = { - type: 'Document', - members: documentSchema, - // optionally, you may specify a constructor to use to create the object - // that will underlie unmarshalled instances. If not specified, - // Object.create(null) will be used. - valueConstructor: MyCustomDocument -}; -``` - -### Hash - -Used for objects with string keys and untyped values. - -#### Example - -```javascript -const collectionProperty = { - type: 'Hash', - // optionally, you may specify configuration options for the - // @aws/dynamodb-auto-marshaller package's Marshaller class: - unwrapNumbers: false, - onInvalid: 'omit', - onEmpty: 'nullify', -}; -``` - -### List - -Used for arrays or iterable objects whose elements are all of the same type. - -#### Example - -```javascript -const listOfStrings = { - type: 'List', - memberType: {type: 'String'} -}; -``` - -### Map - -Used for `Map` objects whose values are all of the same type. - -#### Example - -```javascript -const mapOfStrings = { - type: 'Map', - memberType: {type: 'String'} -}; -``` - -### Null - -Used to serialize `null`. Often used as a sigil value. - -#### Example - -```javascript -const nullProperty = {type: 'Null'}; -``` - -### Number - -Used to serialize numbers. - -**May be used as a table or index key.** - -#### Example - -```javascript -const numberProperty = {type: 'Number'}; -``` - -### Set - -Used to serialize sets whose values are all of the same type. DynamoDB allows -sets of numbers, sets of strings, and sets of binary values. - -#### Example - -```javascript -const binarySetProperty = {type: 'Set', memberType: 'Binary'}; -const numberSetProperty = {type: 'Set', memberType: 'Number'}; -const stringSetProperty = {type: 'Set', memberType: 'String'}; -``` - -### String - -Used to serialize strings. - -**May be used as a table or index key.** - -#### Example - -```javascript -const stringProperty = {type: 'String'}; -``` - -### Tuple - -Used to store arrays that have a specific length and sequence of elements. 
- -#### Example - -```javascript -const tupleProperty = { - type: 'Tuple', - members: [ - {type: 'Boolean'}, - {type: 'String'} - ] -}; -``` diff --git a/packages/dynamodb-data-marshaller/package.json b/packages/dynamodb-data-marshaller/package.json deleted file mode 100644 index 548833b0..00000000 --- a/packages/dynamodb-data-marshaller/package.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "name": "@aws/dynamodb-data-marshaller", - "version": "0.7.3", - "description": "A schema-based data marshaller for Amazon DynamoDB", - "keywords": [ - "aws", - "dynamodb" - ], - "repository": { - "type": "git", - "url": "git+https://github.com/awslabs/dynamodb-data-mapper-js.git" - }, - "bugs": { - "url": "https://github.com/awslabs/dynamodb-data-mapper-js/issues" - }, - "homepage": "https://awslabs.github.io/dynamodb-data-mapper-js/packages/dynamodb-data-marshaller/", - "main": "./build/index.js", - "types": "./build/index.d.ts", - "scripts": { - "docs": "typedoc src", - "prepublishOnly": "tsc", - "pretest": "tsc -p tsconfig.test.json", - "test": "jest \"build/(.+).spec.js\"" - }, - "author": { - "name": "AWS SDK for JavaScript Team", - "email": "aws-sdk-js@amazon.com" - }, - "license": "Apache-2.0", - "devDependencies": { - "@types/jest": "^24", - "@types/node": "^8.0.4", - "aws-sdk": "^2.7.0", - "jest": "^24", - "typedoc": "^0.14.0", - "typescript": "^3.4" - }, - "dependencies": { - "@aws/dynamodb-auto-marshaller": "^0.7.1", - "@aws/dynamodb-expressions": "^0.7.3", - "tslib": "^1.9", - "utf8-bytes": "^0.0.1" - }, - "peerDependencies": { - "aws-sdk": "^2.7.0" - } -} diff --git a/packages/dynamodb-data-marshaller/src/InvalidSchemaError.ts b/packages/dynamodb-data-marshaller/src/InvalidSchemaError.ts deleted file mode 100644 index cd6fd23f..00000000 --- a/packages/dynamodb-data-marshaller/src/InvalidSchemaError.ts +++ /dev/null @@ -1,11 +0,0 @@ -import {SchemaType} from "./SchemaType"; - -/** - * An error thrown when a marshaller or unmarshaller cannot understand a node of - * the provided schema. - */ -export class InvalidSchemaError extends Error { - constructor(public readonly node: SchemaType, message?: string) { - super(message); - } -} \ No newline at end of file diff --git a/packages/dynamodb-data-marshaller/src/InvalidValueError.ts b/packages/dynamodb-data-marshaller/src/InvalidValueError.ts deleted file mode 100644 index 81fcbecc..00000000 --- a/packages/dynamodb-data-marshaller/src/InvalidValueError.ts +++ /dev/null @@ -1,12 +0,0 @@ -/** - * An error thrown by the marshaller when a node of the provided input cannot be - * marshalled into the type specified in the schema. 
- */ -export class InvalidValueError extends Error { - constructor( - public readonly invalidValue: any, - message?: string - ) { - super(message); - } -} \ No newline at end of file diff --git a/packages/dynamodb-data-marshaller/src/KeySchema.ts b/packages/dynamodb-data-marshaller/src/KeySchema.ts deleted file mode 100644 index cb3ccf93..00000000 --- a/packages/dynamodb-data-marshaller/src/KeySchema.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { KeyType } from './SchemaType'; - -export interface AttributeTypeMap { - [attributeName: string]: ScalarAttributeType; -} - -export interface KeySchema { - attributes: AttributeTypeMap; - - tableKeys: KeyTypeMap; - - indexKeys: PerIndexKeys; -} - -export interface KeyTypeMap { - [attributeName: string]: KeyType; -} - -export interface PerIndexKeys { - [indexName: string]: KeyTypeMap; -} - -export type ScalarAttributeType = 'S'|'N'|'B'; diff --git a/packages/dynamodb-data-marshaller/src/Schema.spec.ts b/packages/dynamodb-data-marshaller/src/Schema.spec.ts deleted file mode 100644 index ba5dad76..00000000 --- a/packages/dynamodb-data-marshaller/src/Schema.spec.ts +++ /dev/null @@ -1,49 +0,0 @@ -import {isSchema} from "./Schema"; - -describe('isSchema', () => { - it('should reject scalar values', () => { - for (let scalar of ['string', 123, true, null, void 0]) { - expect(isSchema(scalar)).toBe(false); - } - }); - - it('should accept empty objects', () => { - expect(isSchema({})).toBe(true); - }); - - it('should accept objects whose members are all schema types', () => { - expect(isSchema({ - foo: {type: 'Binary'}, - bar: {type: 'Boolean'}, - baz: {type: 'String'}, - quux: { - type: 'Document', - members: { - fizz: {type: 'Set', memberType: 'String'}, - buzz: { - type: 'Tuple', - members: [ - { - type: 'List', - memberType: {type: 'Set', memberType: 'Number'}, - }, - { - type: 'Map', - memberType: {type: 'Date'}, - } - ] - }, - }, - }, - })).toBe(true); - }); - - it('should reject objects whose members are not all schema types', () => { - expect(isSchema({ - foo: {type: 'Binary'}, - bar: {type: 'Boolean'}, - baz: {type: 'String'}, - quux: 'string', - })).toBe(false); - }); -}); diff --git a/packages/dynamodb-data-marshaller/src/Schema.ts b/packages/dynamodb-data-marshaller/src/Schema.ts deleted file mode 100644 index 368906d9..00000000 --- a/packages/dynamodb-data-marshaller/src/Schema.ts +++ /dev/null @@ -1,26 +0,0 @@ -import {isSchemaType, SchemaType} from "./SchemaType"; - -/** - * A key => value mapping outlining how to convert an arbitrary JavaScript - * object into a strongly typed DynamoDB AttributeMap and back again. 
- */ -export interface Schema { - [key: string]: SchemaType; -} - -/** - * Evaluates whether the provided argument is a Schema object - */ -export function isSchema(arg: any): arg is Schema { - if (!Boolean(arg) || typeof arg !== 'object') { - return false; - } - - for (let key of Object.keys(arg)) { - if (!isSchemaType(arg[key])) { - return false; - } - } - - return true; -} \ No newline at end of file diff --git a/packages/dynamodb-data-marshaller/src/SchemaType.spec.ts b/packages/dynamodb-data-marshaller/src/SchemaType.spec.ts deleted file mode 100644 index 044424e8..00000000 --- a/packages/dynamodb-data-marshaller/src/SchemaType.spec.ts +++ /dev/null @@ -1,283 +0,0 @@ -import {isSchemaType} from "./SchemaType"; -import {Schema} from "./Schema"; - -describe('isSchemaType', () => { - it('should reject scalar values', () => { - for (let scalar of ['string', 123, true, null, void 0]) { - expect(isSchemaType(scalar)).toBe(false); - } - }); - - it('should accept values with a string `attributeName` property', () => { - expect(isSchemaType({ - type: 'Boolean', - attributeName: 'boolProp' - })).toBe(true); - }); - - it('should reject values with a non-string `attributeName` property', () => { - expect(isSchemaType({ - type: 'Boolean', - attributeName: 123 - })).toBe(false); - }); - - describe('keyable types', () => { - for (let dataType of ['Binary', 'Date', 'Number', 'String']) { - it( - `should accept ${dataType} type declarations with a keyType`, - () => { - expect(isSchemaType({ - type: dataType, - keyType: 'HASH', - })).toBe(true); - } - ); - - it( - `should reject ${dataType} type declarations with an unrecognized keyType`, - () => { - expect(isSchemaType({ - type: dataType, - keyType: 'foo', - })).toBe(false); - } - ); - - it( - `should accept ${dataType} type declarations with a hash of indexKeyConfigurations`, - () => { - expect(isSchemaType({ - type: dataType, - indexKeyConfigurations: {foo: 'HASH'}, - })).toBe(true); - } - ); - - it( - `should reject ${dataType} type declarations with a hash of invalid indexKeyConfigurations`, - () => { - expect(isSchemaType({ - type: dataType, - indexKeyConfigurations: {foo: 'bar', fizz: 'buzz'}, - })).toBe(false); - } - ); - - it( - `should reject ${dataType} type declarations with scalar indexKeyConfiguration`, - () => { - for (let scalar of ['string', 123, null, true]) { - expect(isSchemaType({ - type: dataType, - indexKeyConfigurations: scalar, - })).toBe(false); - } - } - ); - } - }); - - describe('Any types', () => { - it('should accept Any types', () => { - expect(isSchemaType({type: 'Any'})).toBe(true); - }); - }); - - describe('Binary types', () => { - it('should accept Binary types', () => { - expect(isSchemaType({type: 'Binary'})).toBe(true); - }); - }); - - describe('Boolean types', () => { - it('should accept Boolean types', () => { - expect(isSchemaType({type: 'Boolean'})).toBe(true); - }); - }); - - describe('Custom types', () => { - it( - 'should accept Custom types with a defined marshaller and unmarshaller', - () => { - expect(isSchemaType({ - type: 'Custom', - marshall: () => {}, - unmarshall: () => {}, - })).toBe(true); - } - ); - - it('should reject Custom types without a defined marshaller', () => { - expect(isSchemaType({ - type: 'Custom', - unmarshall: () => {}, - })).toBe(false); - }); - - it('should reject Custom types without a defined unmarshaller', () => { - expect(isSchemaType({ - type: 'Custom', - marshall: () => {}, - })).toBe(false); - }); - }); - - describe('Date types', () => { - it('should accept Date types', () 
=> { - expect(isSchemaType({type: 'Date'})).toBe(true); - }); - }); - - describe('Document types', () => { - it('should accept Document types', () => { - expect(isSchemaType({ - type: 'Document', - members: { - str: {type: 'String'} - }, - })).toBe(true); - }); - - it('should reject Document types with non-SchemaType members', () => { - expect(isSchemaType({ - type: 'Document', - members: { - foo: 'bar', - }, - })).toBe(false); - }); - - it('should reject Document types without declared members', () => { - expect(isSchemaType({ - type: 'Document' - })).toBe(false); - }); - - it('should accept Document types with a valueConstructor', () => { - expect(isSchemaType({ - type: 'Document', - members: {}, - valueConstructor: Date, - })).toBe(true); - }); - - it( - 'should reject Document types with a non-function valueConstructor', - () => { - expect(isSchemaType({ - type: 'Document', - members: {}, - valueConstructor: 'foo', - })).toBe(false); - } - ); - }); - - describe('List types', () => { - it('should accept List types', () => { - expect(isSchemaType({ - type: 'List', - memberType: {type: 'Boolean'}, - })).toBe(true); - }); - - it('should reject List types without a defined memberType', () => { - expect(isSchemaType({type: 'List'})).toBe(false); - }); - - it('should reject List types with malformed memberTypes', () => { - expect(isSchemaType({ - type: 'List', - memberType: 'Boolean', - })).toBe(false); - }); - }); - - describe('Map types', () => { - it('should accept Map types', () => { - expect(isSchemaType({ - type: 'Map', - memberType: {type: 'Boolean'}, - })).toBe(true); - }); - - it('should reject Map types without a defined memberType', () => { - expect(isSchemaType({type: 'Map'})).toBe(false); - }); - - it('should reject Map types with malformed memberTypes', () => { - expect(isSchemaType({ - type: 'Map', - memberType: 'Boolean', - })).toBe(false); - }); - }); - - describe('Null types', () => { - it('should accept Null types', () => { - expect(isSchemaType({type: 'Null'})).toBe(true); - }); - }); - - describe('Number types', () => { - it('should accept Number types', () => { - expect(isSchemaType({type: 'Number'})).toBe(true); - }); - }); - - describe('Set types', () => { - it('should accept StringSet types', () => { - expect(isSchemaType({type: 'Set', memberType: 'String'})) - .toBe(true); - }); - - it('should accept NumberSet types', () => { - expect(isSchemaType({type: 'Set', memberType: 'Number'})) - .toBe(true); - }); - - it('should accept BinarySet types', () => { - expect(isSchemaType({type: 'Set', memberType: 'Binary'})) - .toBe(true); - }); - }); - - describe('String types', () => { - it('should accept String types', () => { - expect(isSchemaType({type: 'String'})).toBe(true); - }); - }); - - describe('Tuple types', () => { - it('should accept Tuple types', () => { - expect(isSchemaType({ - type: 'Tuple', - members: [{type: 'Boolean'}, {type: 'String'}], - })).toBe(true); - }); - - it('should reject Tuple types without defined members', () => { - expect(isSchemaType({type: 'Tuple'})).toBe(false); - }); - - it('should reject Tuple types with malformed members', () => { - expect(isSchemaType({ - type: 'Tuple', - members: ['Boolean', 'String'], - })).toBe(false); - }); - }); - - describe('recursive schemas', () => { - it('should accept valid recursive schemas', () => { - const document: Schema = {}; - document.recursive = { - type: "Document", - members: document, - }; - - expect(isSchemaType(document.recursive)).toBe(true); - }); - }); -}); diff --git 
a/packages/dynamodb-data-marshaller/src/SchemaType.ts b/packages/dynamodb-data-marshaller/src/SchemaType.ts deleted file mode 100644 index 33983c1e..00000000 --- a/packages/dynamodb-data-marshaller/src/SchemaType.ts +++ /dev/null @@ -1,406 +0,0 @@ -import { ScalarAttributeType } from './KeySchema'; -import { Schema } from './Schema'; -import { BinaryValue, MarshallingOptions } from "@aws/dynamodb-auto-marshaller"; -import { AttributeValue } from 'aws-sdk/clients/dynamodb'; - -/** - * The enumeration of types supported by this marshaller package. - */ -export const TypeTags = { - Any: 'Any', - Binary: 'Binary', - Boolean: 'Boolean', - Collection: 'Collection', - Custom: 'Custom', - Date: 'Date', - Document: 'Document', - Hash: 'Hash', - List: 'List', - Map: 'Map', - Null: 'Null', - Number: 'Number', - Set: 'Set', - String: 'String', - Tuple: 'Tuple', -}; - -/** - * A type understood by this marshaller package. - */ -export type TypeTag = keyof typeof TypeTags; - -/** - * An abstract base type defining the common characteristics of all SchemaTypes - */ -export interface BaseType { - /** - * The type of node represented by this object. - */ - type: TypeTag; - - /** - * The key in which this value will be persisted in DynamoDB. If not - * provided, the key will be assumed to be the same in the input and in the - * persisted record. - */ - attributeName?: string; - - /** - * An optional default value factory. If a type has a defined - * defaultProvider and its value is `undefined` in the provided input, the - * defaultProvider will be called and its return value serialized. - */ - defaultProvider?: () => T; -} - -function isBaseType(arg: any): arg is BaseType { - return Boolean(arg) && typeof arg === 'object' - && typeof arg.type === 'string' - && arg.type in TypeTags - && ['string', 'undefined'].indexOf(typeof arg.attributeName) > -1; -} - -/** - * The types of keys a given attribute can represent. - */ -export const KeyTypes = { - HASH: 'HASH', - RANGE: 'RANGE', -}; - -/** - * A type of DynamoDB key. - */ -export type KeyType = keyof typeof KeyTypes; - -/** - * A trait applied to types that may contain a DynamoDB key. - */ -export interface KeyableType { - /** - * Key configuration as it pertains to the DynamoDB table. - */ - keyType?: KeyType; - - /** - * An array of key configurations as they apply to global and local - * secondary indices. - */ - indexKeyConfigurations?: {[key: string]: KeyType}, -} - -function isKeyableType(arg: object): boolean { - const {keyType, indexKeyConfigurations} = arg as any; - - if (!(keyType === undefined || keyType in KeyTypes)) { - return false; - } - - const idxKeysType = typeof indexKeyConfigurations; - - if (indexKeyConfigurations && idxKeysType === 'object') { - for (const indexName of Object.keys(indexKeyConfigurations)) { - if (!(indexKeyConfigurations[indexName] in KeyTypes)) { - return false; - } - } - - return true; - } - - return idxKeysType === 'undefined'; -} - -/** - * A node used to store values whose type is variable or unknown. The value will - * be marshalled an unmarshalled based on runtime type detection, which may - * result in data not being precisely round-tripped (e.g., "empty" types such as - * zero-length strings, buffers, and sets will be returned from the mapper as - * `null` rather than an empty instance of the originally submitted type). - */ -export interface AnyType extends BaseType, MarshallingOptions { - type: 'Any'; -} - -/** - * A node used to store binary data (e.g., Buffer, ArrayBuffer, or - * ArrayBufferView objects). 
- */ -export interface BinaryType extends BaseType, KeyableType { - type: 'Binary'; -} - -/** - * A node used to store boolean values. - */ -export interface BooleanType extends BaseType { - type: 'Boolean'; -} - -/** - * A node used to store an untyped or mixed collection. Values provided for this - * node will be marshalled using run-time type detection and may not be exactly - * the same when unmarshalled. - */ -export interface CollectionType extends - BaseType>, - MarshallingOptions -{ - type: 'Collection'; -} - -/** - * A node whose type is not managed by the marshaller, but rather by the - * `marshall` and `unmarshall` functions defined in this SchemaType. Useful for - * objects not easily classified using the standard schema taxonomy. - */ -export interface CustomType extends BaseType, KeyableType { - type: 'Custom'; - - /** - * The attribute type to be used for this field when creating or updating - * the DynamoDB table definition for this record. - * - * Required if the custom field is being used as a key and the schema is - * used to create or update a table or index. - */ - attributeType?: ScalarAttributeType; - - /** - * A function that converts an input value into a DynamoDB attribute value. - * This function will not be invoked if the input value is undefined. - * - * @param input The value to be converted. - */ - marshall: (input: JsType)=> AttributeValue; - - /** - * A function that converts a DynamoDB AttributeValue into a JavaScript - * value. - * - * @param persistedValue The value to be converted. - */ - unmarshall: (persistedValue: AttributeValue) => JsType; -} - -/** - * A node represented by a Date object. - * - * Nodes of this type will be marshalled into DynamoDB Number types containing - * the epoch timestamp of the date for use with DyanmoDB's Time-to-Live feature. - * - * Timezone information is not persisted. - */ -export interface DateType extends BaseType, KeyableType { - type: 'Date'; -} - -/** - * A constructor that takes no arguments. - */ -export interface ZeroArgumentsConstructor { - new (): T; -} - -/** - * A node represented by its own full Schema. Marshalled as an embedded map. - */ -export interface DocumentTypeextends BaseType { - type: 'Document'; - - /** - * A Schema outlining how the members of this document are to be - * (un)marshalled. - */ - members: Schema; - - /** - * A constructor to invoke to create an object onto which the document's - * members will be unmarshalled. If not provided, `Object.create(null)` will - * be used. - */ - valueConstructor?: ZeroArgumentsConstructor; -} - -/** - * A node used to store a key => value mapping of mixed or untyped values. - * Values provided for this node will be marshalled using run-time type - * detection and may not be exactly the same when unmarshalled. - */ -export interface HashType extends - BaseType<{[key: string]: any}>, - MarshallingOptions -{ - type: 'Hash'; -} - -/** - * A node used to store an array or iterable of like values, e.g., - * `Array`. - * - * @see CollectionType For untyped or mixed lists - * @see TupleType For tuples - */ -export interface ListType extends BaseType> { - type: 'List'; - - /** - * The schema node by which each member of the list should be - * (un)marshalled. - */ - memberType: SchemaType; -} - -/** - * A node used to store a mapping of strings to like values, e.g., - * `Map`. 
- * - * @see HashType For untyped of mixed hashes - * @see DocumentType For strongly-typed documents - */ -export interface MapType extends BaseType> { - type: 'Map'; - memberType: SchemaType; -} - -/** - * A node used to store null values. - */ -export interface NullType extends BaseType { - type: 'Null'; -} - -/** - * A node used to store a number value. Numbers should be representable as IEEE - * 754 double precision floating point values to ensure no precision is lost - * during (un)marshalling. - */ -export interface NumberType extends BaseType, KeyableType { - type: 'Number'; - versionAttribute?: boolean; -} - -export interface SetType extends BaseType> { - type: 'Set'; - memberType: 'String'|'Number'|'Binary'; -} - -/** - * A node used to store a string value. - */ -export interface StringType extends BaseType, KeyableType { - type: 'String'; -} - -/** - * A node used to store a fixed-length list of items, each of which may be of - * a different type, e.g., `[boolean, string]`. - */ -export interface TupleType = Array> extends - BaseType -{ - type: 'Tuple'; - members: Array; -} - -/** - * A node in a Schema used by this marshaller package. - */ -export type SchemaType = - AnyType | - BinaryType | - BooleanType | - CustomType | - CollectionType | - DateType | - DocumentType | - HashType | - ListType | - MapType | - NullType | - NumberType | - SetType | - StringType | - TupleType; - -export function isSchemaType( - arg: any, - alreadyVisited: Set = new Set() -): arg is SchemaType { - if (isBaseType(arg)) { - if (alreadyVisited.has(arg)) { - return true; - } - - alreadyVisited.add(arg); - switch (arg.type) { - case 'Binary': - case 'Date': - case 'String': - return isKeyableType(arg); - case 'Custom': - return isKeyableType(arg) - && typeof (arg as CustomType).marshall === 'function' - && typeof (arg as CustomType).unmarshall === 'function' - && [ - void 0, - 'S', - 'N', - 'B', - ].indexOf((arg as CustomType).attributeType) > -1; - case 'Document': - return isDocumentType(arg, alreadyVisited); - case 'List': - case 'Map': - return isSchemaType( - (arg as ListType).memberType, - alreadyVisited - ); - case 'Number': - return isKeyableType(arg) && ['boolean', 'undefined'] - .indexOf(typeof (arg as NumberType).versionAttribute) > -1; - case 'Tuple': - return isTupleType(arg, alreadyVisited); - default: - return true; - } - } - - return false; -} - -function isDocumentType( - arg: BaseType, - alreadyVisited: Set -): arg is DocumentType { - const {valueConstructor, members} = arg as DocumentType; - if (!members || typeof members !== 'object') { - return false; - } - - for (let key of Object.keys(members)) { - if (!isSchemaType(members[key], alreadyVisited)) { - return false; - } - } - - return ['function', 'undefined',].indexOf(typeof valueConstructor) > -1; -} - -function isTupleType( - arg: BaseType, - alreadyVisited: Set -): arg is TupleType { - const {members} = arg as TupleType; - if (!Array.isArray(members)) { - return false; - } - - for (let member of members) { - if (!isSchemaType(member, alreadyVisited)) { - return false; - } - } - - return true; -} diff --git a/packages/dynamodb-data-marshaller/src/index.ts b/packages/dynamodb-data-marshaller/src/index.ts deleted file mode 100644 index 9a6375ce..00000000 --- a/packages/dynamodb-data-marshaller/src/index.ts +++ /dev/null @@ -1,12 +0,0 @@ -export * from './InvalidSchemaError'; -export * from './InvalidValueError'; -export * from './isKey'; -export * from './KeySchema'; -export * from './keysFromSchema'; -export * from 
'./marshallExpression'; -export * from './marshallItem'; -export * from './marshallKey'; -export * from './Schema'; -export * from './SchemaType'; -export * from './toSchemaName'; -export * from './unmarshallItem'; diff --git a/packages/dynamodb-data-marshaller/src/isKey.spec.ts b/packages/dynamodb-data-marshaller/src/isKey.spec.ts deleted file mode 100644 index 52a6419f..00000000 --- a/packages/dynamodb-data-marshaller/src/isKey.spec.ts +++ /dev/null @@ -1,94 +0,0 @@ -import { isKey } from './isKey'; -import { SchemaType, TypeTag } from './SchemaType'; - -const keyableTypes: Array = [ - 'Binary', - 'Custom', - 'Date', - 'Number', - 'String', -]; - -const unkeyableTypes: Array = [ - 'Any', - 'Boolean', - 'Collection', - 'Document', - 'Hash', - 'List', - 'Map', - 'Null', - 'Set', - 'Tuple', -]; - -describe('isKey', () => { - for (const notKeyType of unkeyableTypes) { - it(`should return false if the field is of type ${notKeyType}`, () => { - expect( - isKey({type: notKeyType, keyType: 'HASH'} as SchemaType) - ).toBe(false); - }); - } - - for (const keyType of keyableTypes) { - it(`should return false if the field is of type ${keyType}`, () => { - expect( - isKey({type: keyType, keyType: 'HASH'} as SchemaType) - ).toBe(true); - }); - } - - it( - 'should return true if the field is an index key and the index name was supplied', - () => { - expect(isKey( - { type: 'String', indexKeyConfigurations: {foo: 'HASH'}}, - 'foo' - )).toBe(true); - } - ); - - it( - 'should return false if the field is an index key and no index name was supplied', - () => { - expect( - isKey({ type: 'String', indexKeyConfigurations: {foo: 'HASH'}}) - ).toBe(false); - } - ); - - it( - 'should return false if the field is an index key and a different index name was supplied', - () => { - expect(isKey( - { type: 'String', indexKeyConfigurations: {foo: 'HASH'}}, - 'bar' - )).toBe(false); - } - ); - - it( - 'should return false if the field is a table key and an index name was supplied', - () => { - expect(isKey( - { type: 'String', keyType: 'HASH'}, - 'foo' - )).toBe(false); - } - ); - - it( - 'should return true if the field is both a table and an index key', - () => { - expect(isKey( - { - type: 'String', - keyType: 'HASH', - indexKeyConfigurations: {foo: 'HASH'} - }, - 'foo' - )).toBe(true); - } - ); -}); diff --git a/packages/dynamodb-data-marshaller/src/isKey.ts b/packages/dynamodb-data-marshaller/src/isKey.ts deleted file mode 100644 index f6d65160..00000000 --- a/packages/dynamodb-data-marshaller/src/isKey.ts +++ /dev/null @@ -1,19 +0,0 @@ -import {SchemaType} from './SchemaType'; - -export function isKey(fieldSchema: SchemaType, indexName?: string): boolean { - if ( - fieldSchema.type === 'Binary' || - fieldSchema.type === 'Custom' || - fieldSchema.type === 'Date' || - fieldSchema.type === 'Number' || - fieldSchema.type === 'String' - ) { - return indexName !== undefined - ? 
Boolean( - fieldSchema.indexKeyConfigurations && - fieldSchema.indexKeyConfigurations[indexName] - ) : Boolean(fieldSchema.keyType); - } - - return false; -} diff --git a/packages/dynamodb-data-marshaller/src/keysFromSchema.spec.ts b/packages/dynamodb-data-marshaller/src/keysFromSchema.spec.ts deleted file mode 100644 index ca05a161..00000000 --- a/packages/dynamodb-data-marshaller/src/keysFromSchema.spec.ts +++ /dev/null @@ -1,95 +0,0 @@ -import { keysFromSchema } from './keysFromSchema'; -import { Schema } from './Schema'; - -describe('keysFromSchema', () => { - const schema: Schema = { - partitionKey: { - type: 'Number', - keyType: 'HASH', - }, - createdAt: { - type: 'Date', - keyType: 'RANGE', - indexKeyConfigurations: { - chronological: 'HASH', - globalIndex: 'RANGE' - }, - attributeName: 'timestamp' - }, - createdBy: { - type: 'String', - indexKeyConfigurations: { - globalIndex: 'HASH', - localIndex: 'RANGE' - }, - attributeName: 'creator', - }, - binaryKey: { - type: 'Binary', - indexKeyConfigurations: { - binaryIndex: 'HASH' - } - }, - customKey: { - type: 'Custom', - attributeType: 'S', - marshall: str => str, - unmarshall: av => av.S, - indexKeyConfigurations: { - binaryIndex: 'RANGE', - }, - }, - listProp: { type: 'Collection' }, - }; - - it('should identify the table keys', () => { - expect(keysFromSchema(schema).tableKeys).toEqual({ - partitionKey: 'HASH', - timestamp: 'RANGE', - }); - }); - - it('should identify any index keys', () => { - expect(keysFromSchema(schema).indexKeys).toEqual({ - binaryIndex: { - binaryKey: 'HASH', - customKey: 'RANGE' - }, - chronological: { - timestamp: 'HASH', - }, - globalIndex: { - creator: 'HASH', - timestamp: 'RANGE', - }, - localIndex: { - creator: 'RANGE', - }, - }); - }); - - it('should record the attribute type of any value used as a key', () => { - expect(keysFromSchema(schema).attributes).toEqual({ - partitionKey: 'N', - timestamp: 'N', - creator: 'S', - binaryKey: 'B', - customKey: 'S', - }); - }); - - it( - 'should throw if a custom property does not define an attribute type', - () => { - const schema: Schema = { - customKey: { - type: 'Custom', - keyType: 'HASH', - marshall: str => str, - unmarshall: av => av.S, - }, - }; - expect(() => keysFromSchema(schema)).toThrow(); - } - ); -}); diff --git a/packages/dynamodb-data-marshaller/src/keysFromSchema.ts b/packages/dynamodb-data-marshaller/src/keysFromSchema.ts deleted file mode 100644 index 47538597..00000000 --- a/packages/dynamodb-data-marshaller/src/keysFromSchema.ts +++ /dev/null @@ -1,80 +0,0 @@ -import { - AttributeTypeMap, - KeySchema, - KeyTypeMap, - ScalarAttributeType, -} from './KeySchema'; -import {Schema} from './Schema'; -import { - BinaryType, - CustomType, - DateType, - NumberType, - StringType, -} from './SchemaType'; - -export function keysFromSchema(schema: Schema): KeySchema { - const attributes: AttributeTypeMap = {}; - const tableKeys: KeyTypeMap = {}; - const indexKeys: {[key: string]: KeyTypeMap} = {}; - - for (const propertyName of Object.keys(schema)) { - const fieldSchema = schema[propertyName]; - if ( - fieldSchema.type === 'Binary' || - fieldSchema.type === 'Custom' || - fieldSchema.type === 'Date' || - fieldSchema.type === 'Number' || - fieldSchema.type === 'String' - ) { - const { - attributeName = propertyName - } = fieldSchema; - - if (fieldSchema.keyType) { - attributes[attributeName] = attributeType(fieldSchema); - tableKeys[attributeName] = fieldSchema.keyType; - } - - if ( - fieldSchema.indexKeyConfigurations && - 
Object.keys(fieldSchema.indexKeyConfigurations).length > 0 - ) { - attributes[attributeName] = attributeType(fieldSchema); - - for (const indexName of Object.keys( - fieldSchema.indexKeyConfigurations - )) { - if (!(indexName in indexKeys)) { - indexKeys[indexName] = {}; - } - indexKeys[indexName][attributeName] - = fieldSchema.indexKeyConfigurations[indexName]; - } - } - } - } - - return {attributes, tableKeys, indexKeys}; -} - -function attributeType( - fieldSchema: BinaryType|CustomType|DateType|NumberType|StringType -): ScalarAttributeType { - switch (fieldSchema.type) { - case 'Binary': - return 'B'; - case 'Custom': - if (!fieldSchema.attributeType) { - throw new Error( - 'Invalid schema: no attribute type defined for custom field' - ); - } - return fieldSchema.attributeType; - case 'Date': - case 'Number': - return 'N'; - case 'String': - return 'S'; - } -} diff --git a/packages/dynamodb-data-marshaller/src/marshallExpression.spec.ts b/packages/dynamodb-data-marshaller/src/marshallExpression.spec.ts deleted file mode 100644 index 335d444d..00000000 --- a/packages/dynamodb-data-marshaller/src/marshallExpression.spec.ts +++ /dev/null @@ -1,259 +0,0 @@ -import { - marshallConditionExpression, - marshallFunctionExpression, - marshallMathematicalExpression, - marshallProjectionExpression, - marshallUpdateExpression, -} from './marshallExpression'; -import { Schema } from './Schema'; -import { - AttributePath, - FunctionExpression, - MathematicalExpression, - UpdateExpression, -} from '@aws/dynamodb-expressions'; - -const schema: Schema = { - foo: { - type: 'String', - attributeName: 'bar', - }, - fizz: { - type: 'Number', - attributeName: 'buzz', - }, - nested: { - type: 'Document', - attributeName: 'nested_level_1', - members: { - nested: { - type: 'Document', - attributeName: 'nested_level_2', - members: { - scalar: { - type: 'String', - attributeName: 'nested_scalar' - }, - }, - }, - }, - }, -}; - -describe('marshallConditionExpression', () => { - it('should map nested keys to their attributeName equivalents in simple conditions', () => { - expect(marshallConditionExpression( - {type: 'Equals', subject: 'foo', object: 'baz'}, - schema - )).toEqual({ - expression: '#attr0 = :val1', - ExpressionAttributeNames: { '#attr0': 'bar' }, - ExpressionAttributeValues: { ':val1': { 'S': 'baz' } }, - }) - }); - - it('should map nested keys to their attributeName equivalents in bounding conditions', () => { - expect(marshallConditionExpression( - {type: 'Between', subject: 'fizz', lowerBound: 1, upperBound: 5}, - schema - )).toEqual({ - expression: '#attr0 BETWEEN :val1 AND :val2', - ExpressionAttributeNames: { '#attr0': 'buzz' }, - ExpressionAttributeValues: { - ':val1': { 'N': '1' }, - ':val2': { 'N': '5' }, - }, - }) - }); - - it('should map nested keys to their attributeName equivalents in membership conditions', () => { - expect(marshallConditionExpression( - {type: 'Membership', subject: 'foo', values: ['bar', 'baz', 'quux']}, - schema - )).toEqual({ - expression: '#attr0 IN (:val1, :val2, :val3)', - ExpressionAttributeNames: { '#attr0': 'bar' }, - ExpressionAttributeValues: { - ':val1': { 'S': 'bar' }, - ':val2': { 'S': 'baz' }, - ':val3': { 'S': 'quux' }, - }, - }) - }); - - it('should map nested keys to their attributeName equivalents in negated conditions', () => { - expect(marshallConditionExpression( - {type: 'Not', condition: {type: 'Equals', subject: 'foo', object: 'baz'}}, - schema - )).toEqual({ - expression: 'NOT (#attr0 = :val1)', - ExpressionAttributeNames: { '#attr0': 'bar' }, - 
ExpressionAttributeValues: { ':val1': { 'S': 'baz' } }, - }) - }); - - it('should map nested keys to their attributeName equivalents in compound conditions', () => { - expect(marshallConditionExpression( - { - type: 'And', - conditions: [ - {type: 'Equals', subject: 'foo', object: 'baz'}, - {type: 'Between', subject: 'fizz', lowerBound: 1, upperBound: 5}, - ] - }, - schema - )).toEqual({ - expression: '(#attr0 = :val1) AND (#attr2 BETWEEN :val3 AND :val4)', - ExpressionAttributeNames: { - '#attr0': 'bar', - '#attr2': 'buzz', - }, - ExpressionAttributeValues: { - ':val1': { 'S': 'baz' }, - ':val3': { 'N': '1' }, - ':val4': { 'N': '5' }, - }, - }) - }); - - it('should handle function conditions', () => { - expect(marshallConditionExpression( - new FunctionExpression('attributeExists', new AttributePath('nested.nested.scalar')), - schema - )).toEqual({ - expression: 'attributeExists(#attr0.#attr1.#attr2)', - ExpressionAttributeNames: { - '#attr0': 'nested_level_1', - '#attr1': 'nested_level_2', - '#attr2': 'nested_scalar', - }, - ExpressionAttributeValues: {}, - }); - - expect(marshallConditionExpression( - {type: 'Function', name: 'attribute_exists', subject: 'nested.nested.scalar'}, - schema - )).toEqual({ - expression: 'attribute_exists(#attr0.#attr1.#attr2)', - ExpressionAttributeNames: { - '#attr0': 'nested_level_1', - '#attr1': 'nested_level_2', - '#attr2': 'nested_scalar', - }, - ExpressionAttributeValues: {}, - }); - - expect(marshallConditionExpression( - { - type: 'Function', - name: 'contains', - subject: 'nested.nested.scalar', - expected: 'substr' - }, - schema - )).toEqual({ - expression: 'contains(#attr0.#attr1.#attr2, :val3)', - ExpressionAttributeNames: { - '#attr0': 'nested_level_1', - '#attr1': 'nested_level_2', - '#attr2': 'nested_scalar', - }, - ExpressionAttributeValues: { - ':val3': {S: 'substr'} - }, - }); - }); -}); - -describe('marshallFunctionExpression', () => { - it('should map nested keys to their attributeName equivalents', () => { - expect(marshallFunctionExpression( - new FunctionExpression('attributeExists', new AttributePath('nested.nested.scalar')), - schema - )).toEqual({ - expression: 'attributeExists(#attr0.#attr1.#attr2)', - ExpressionAttributeNames: { - '#attr0': 'nested_level_1', - '#attr1': 'nested_level_2', - '#attr2': 'nested_scalar', - }, - ExpressionAttributeValues: {}, - }) - }); - - it('should not map non-path arguments', () => { - expect(marshallFunctionExpression( - new FunctionExpression('beginsWith', new AttributePath('nested.nested.scalar'), 'foo'), - schema - )).toEqual({ - expression: 'beginsWith(#attr0.#attr1.#attr2, :val3)', - ExpressionAttributeNames: { - '#attr0': 'nested_level_1', - '#attr1': 'nested_level_2', - '#attr2': 'nested_scalar', - }, - ExpressionAttributeValues: { - ':val3': {'S': 'foo'}, - }, - }) - }) -}); - -describe('marshallMathematicalExpression', () => { - it('should map nested keys to their attributeName equivalents', () => { - expect(marshallMathematicalExpression( - new MathematicalExpression('fizz', '-', 2), - schema - )).toEqual({ - expression: '#attr0 - :val1', - ExpressionAttributeNames: { - '#attr0': 'buzz', - }, - ExpressionAttributeValues: { - ':val1': {'N': '2'} - }, - }) - }); -}); - -describe('marshallProjectionExpression', () => { - it('should map nested keys to their attributeName equivalents', () => { - expect(marshallProjectionExpression( - [new AttributePath('nested.nested.scalar'), 'fizz'], - schema - )).toEqual({ - expression: '#attr0.#attr1.#attr2, #attr3', - ExpressionAttributeNames: { - 
'#attr0': 'nested_level_1', - '#attr1': 'nested_level_2', - '#attr2': 'nested_scalar', - '#attr3': 'buzz', - }, - ExpressionAttributeValues: {}, - }) - }); -}); - -describe('marshallUpdateExpression', () => { - it('should map nested keys to their attributeName equivalents', () => { - const expr = new UpdateExpression; - expr.set(new AttributePath('nested.nested.scalar'), 'boo'); - expr.add('fizz', 1); - expr.remove('foo'); - - expect(marshallUpdateExpression(expr, schema)).toEqual({ - expression: 'ADD #attr0 :val1 SET #attr2.#attr3.#attr4 = :val5 REMOVE #attr6', - ExpressionAttributeNames: { - '#attr2': 'nested_level_1', - '#attr3': 'nested_level_2', - '#attr4': 'nested_scalar', - '#attr0': 'buzz', - '#attr6': 'bar' - }, - ExpressionAttributeValues: { - ':val5': {S: 'boo'}, - ':val1': {N: '1'}, - }, - }) - }); -}); diff --git a/packages/dynamodb-data-marshaller/src/marshallExpression.ts b/packages/dynamodb-data-marshaller/src/marshallExpression.ts deleted file mode 100644 index b9b0e861..00000000 --- a/packages/dynamodb-data-marshaller/src/marshallExpression.ts +++ /dev/null @@ -1,293 +0,0 @@ -import { Schema } from './Schema'; -import { toSchemaName } from './toSchemaName'; -import { - ExpressionAttributeNameMap, - ExpressionAttributeValueMap, -} from 'aws-sdk/clients/dynamodb'; -import { - AttributePath, - ConditionExpression, - FunctionExpression, - MathematicalExpression, - ProjectionExpression, - UpdateExpression, - ExpressionAttributes, - serializeConditionExpression, - serializeProjectionExpression, -} from '@aws/dynamodb-expressions'; - -/** - * A DynamoDB expression serialized to a string and accompanied by the name and - * value substitutions that have been performed during serialization. - */ -export interface MarshalledExpression { - /** - * A serialized expression. - */ - expression: string; - - /** - * A map of name tokens => the property name for which the token has been - * substituted in the serialized expression. - */ - ExpressionAttributeNames: ExpressionAttributeNameMap; - - /** - * A map of value tokens => the value for which the token has been - * substituted in the serialized expression. - */ - ExpressionAttributeValues: ExpressionAttributeValueMap; -} - -/** - * Serialize a condition expression, substituting any property names for the - * corresponding attribute names in the provided schema. - * - * @param expression The expression object to marshall. - * @param schema The schema of the table to which the expression pertains. - * @param attributes An optional ExpressionAttributes object to synchronize - * substitutions across multiple expressions. - */ -export function marshallConditionExpression( - expression: ConditionExpression, - schema: Schema, - attributes: ExpressionAttributes = new ExpressionAttributes -): MarshalledExpression { - const serialized = serializeConditionExpression( - normalizeConditionExpression(expression, schema), - attributes - ); - - return { - expression: serialized, - ExpressionAttributeNames: attributes.names, - ExpressionAttributeValues: attributes.values, - }; -} - -/** - * Serialize a function expression, substituting any property names for the - * corresponding attribute names in the provided schema. - * - * @param expression The expression object to marshall. - * @param schema The schema of the table to which the expression pertains. - * @param attributes An optional ExpressionAttributes object to synchronize - * substitutions across multiple expressions. 
- */ -export function marshallFunctionExpression( - expression: FunctionExpression, - schema: Schema, - attributes: ExpressionAttributes = new ExpressionAttributes -): MarshalledExpression { - const serialized = normalizeFunctionExpression(expression, schema) - .serialize(attributes); - - return { - expression: serialized, - ExpressionAttributeNames: attributes.names, - ExpressionAttributeValues: attributes.values, - }; -} - -/** - * Serialize a mathematical expression, substituting any property names for the - * corresponding attribute names in the provided schema. - * - * @param expression The expression object to marshall. - * @param schema The schema of the table to which the expression pertains. - * @param attributes An optional ExpressionAttributes object to synchronize - * substitutions across multiple expressions. - */ -export function marshallMathematicalExpression( - expression: MathematicalExpression, - schema: Schema, - attributes: ExpressionAttributes = new ExpressionAttributes -): MarshalledExpression { - const serialized = normalizeMathematicalExpression(expression, schema) - .serialize(attributes); - - return { - expression: serialized, - ExpressionAttributeNames: attributes.names, - ExpressionAttributeValues: attributes.values, - }; -} - -/** - * Serialize a projection expression, substituting any property names for the - * corresponding attribute names in the provided schema. - * - * @param expression The expression object to marshall. - * @param schema The schema of the table to which the expression pertains. - * @param attributes An optional ExpressionAttributes object to synchronize - * substitutions across multiple expressions. - */ -export function marshallProjectionExpression( - expression: ProjectionExpression, - schema: Schema, - attributes: ExpressionAttributes = new ExpressionAttributes -): MarshalledExpression { - const serialized = serializeProjectionExpression( - expression.map(el => toSchemaName(el, schema)), - attributes - ); - - return { - expression: serialized, - ExpressionAttributeNames: attributes.names, - ExpressionAttributeValues: attributes.values, - }; -} - -/** - * Serialize an update expression, substituting any property names for the - * corresponding attribute names in the provided schema. - * - * @param expression The expression object to marshall. - * @param schema The schema of the table to which the expression pertains. - * @param attributes An optional ExpressionAttributes object to synchronize - * substitutions across multiple expressions. 
- */ -export function marshallUpdateExpression( - expression: UpdateExpression, - schema: Schema, - attributes: ExpressionAttributes = new ExpressionAttributes -): MarshalledExpression { - const serialized = normalizeUpdateExpression(expression, schema) - .serialize(attributes); - - return { - expression: serialized, - ExpressionAttributeNames: attributes.names, - ExpressionAttributeValues: attributes.values, - }; -} - -function normalizeConditionExpression( - expression: ConditionExpression, - schema: Schema -): ConditionExpression { - if (FunctionExpression.isFunctionExpression(expression)) { - return normalizeFunctionExpression(expression, schema); - } - - switch (expression.type) { - case 'Equals': - case 'NotEquals': - case 'LessThan': - case 'LessThanOrEqualTo': - case 'GreaterThan': - case 'GreaterThanOrEqualTo': - return { - ...expression, - subject: toSchemaName(expression.subject, schema), - object: normalizeIfPath(expression.object, schema), - }; - - case 'Function': - switch (expression.name) { - case 'attribute_exists': - case 'attribute_not_exists': - return { - ...expression, - subject: toSchemaName(expression.subject, schema) - }; - case 'attribute_type': - case 'begins_with': - case 'contains': - return { - ...expression, - subject: toSchemaName(expression.subject, schema) - }; - } - - case 'Between': - return { - ...expression, - subject: toSchemaName(expression.subject, schema), - lowerBound: normalizeIfPath(expression.lowerBound, schema), - upperBound: normalizeIfPath(expression.upperBound, schema), - }; - case 'Membership': - return { - ...expression, - subject: toSchemaName(expression.subject, schema), - values: expression.values.map(arg => normalizeIfPath(arg, schema)), - }; - case 'Not': - return { - ...expression, - condition: normalizeConditionExpression( - expression.condition, - schema - ), - }; - case 'And': - case 'Or': - return { - ...expression, - conditions: expression.conditions.map(condition => - normalizeConditionExpression(condition, schema) - ), - }; - } -} - -function normalizeFunctionExpression( - expression: FunctionExpression, - schema: Schema -): FunctionExpression { - return new FunctionExpression( - expression.name, - ...expression.args.map(arg => normalizeIfPath(arg, schema)) - ); -} - -function normalizeMathematicalExpression( - expression: MathematicalExpression, - schema: Schema -): MathematicalExpression { - return new MathematicalExpression( - AttributePath.isAttributePath(expression.lhs) || typeof expression.lhs === 'string' - ? toSchemaName(expression.lhs, schema) - : expression.lhs, - expression.operator, - AttributePath.isAttributePath(expression.rhs) || typeof expression.rhs === 'string' - ? 
toSchemaName(expression.rhs, schema) - : expression.rhs, - ) -} - -const mapsToTransform: Array<[ - 'toAdd'|'toDelete'|'toSet', - 'add'|'delete'|'set' -]> = [ - ['toAdd', 'add'], - ['toDelete', 'delete'], - ['toSet', 'set'], -]; - -function normalizeUpdateExpression( - expression: UpdateExpression, - schema: Schema -): UpdateExpression { - const normalized = new UpdateExpression; - for (const [dataSet, exprMethod] of mapsToTransform) { - for (const [path, value] of expression[dataSet]) { - normalized[exprMethod](toSchemaName(path, schema), value); - } - } - expression.toRemove.forEach( - el => normalized.remove(toSchemaName(el, schema)) - ); - - return normalized; -} - -function normalizeIfPath(path: any, schema: Schema): any { - if (AttributePath.isAttributePath(path)) { - return toSchemaName(path, schema); - } - - return path; -} diff --git a/packages/dynamodb-data-marshaller/src/marshallItem.spec.ts b/packages/dynamodb-data-marshaller/src/marshallItem.spec.ts deleted file mode 100644 index e35d5b17..00000000 --- a/packages/dynamodb-data-marshaller/src/marshallItem.spec.ts +++ /dev/null @@ -1,623 +0,0 @@ -import {marshallItem} from "./marshallItem"; -import {Schema} from "./Schema"; -import {CustomType} from "./SchemaType"; -import objectContaining = jasmine.objectContaining; -import {BinarySet} from "@aws/dynamodb-auto-marshaller"; - -describe('marshallItem', () => { - it('should serialize fields to their attributeName if provided', () => { - const schema: Schema = { - boolean: { - type: 'Boolean', - attributeName: 'bool_field', - }, - }; - expect(marshallItem(schema, {boolean: true})).toEqual({ - bool_field: {BOOL: true}, - }); - }); - - it('should ignore fields not mentioned in the schema', () => { - expect(marshallItem({foo: {type: 'String'}}, {bar: 'baz'})).toEqual({}); - }); - - it('should ignore fields whose value is undefined', () => { - expect(marshallItem({foo: {type: 'String'}}, {foo: void 0})) - .toEqual({}); - }); - - it('should throw if the schema type tag is not recognized', () => { - expect(() => marshallItem({foo: {type: 'Foo'}} as any, {foo: 'bar'})) - .toThrow('Unrecognized schema node'); - }); - - describe('default values', () => { - it( - 'should call a defined default provider if the input is undefined', - () => { - const defaultProvider = jest.fn(() => 'foo'); - expect(marshallItem( - {foo: {type: 'String', defaultProvider}}, - {foo: void 0} - )).toEqual({foo: {S: 'foo'}}); - - expect(defaultProvider.mock.calls.length).toBe(1); - } - ); - - it('should not call the default provider if the input is defined', () => { - const defaultProvider = jest.fn(() => 'foo'); - expect(marshallItem( - {foo: {type: 'String', defaultProvider}}, - {foo: 'bar'} - )).toEqual({foo: {S: 'bar'}}); - - expect(defaultProvider.mock.calls.length).toBe(0); - }); - }); - - describe('"any" (untyped) fields', () => { - it('should marshall of untyped data', () => { - const schema: Schema = {mixedList: {type: 'Any'}}; - const input = { - mixedList: [ - 'string', - 123, - undefined, - new ArrayBuffer(12), - {foo: 'bar'}, - ['one string', 234, new ArrayBuffer(5)], - ] - }; - - expect(marshallItem(schema, input)).toEqual({ - mixedList: { - L: [ - {S: 'string'}, - {N: '123'}, - {B: new ArrayBuffer(12)}, - {M: {foo: {S: 'bar'}}}, - {L: [ - {S: 'one string'}, - {N: '234'}, - {B: new ArrayBuffer(5)}, - ]}, - ], - }, - }); - }); - }); - - describe('binary fields', () => { - it('should serialize fields of binary types from ArrayBuffers', () => { - const binaryDoc: Schema = { - binary: {type: 'Binary'}, - }; 
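            // Per the assertion below, an ArrayBuffer input is marshalled as {B: <Uint8Array>} of the same byte length.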
- const document = { - binary: new ArrayBuffer(15), - }; - - expect(marshallItem(binaryDoc, document)).toEqual({ - binary: {B: new Uint8Array(15)}, - }); - }); - - it('should serialize binary fields from ArrayBufferViews', () => { - const binaryDoc: Schema = { - binary: {type: 'Binary'}, - }; - const document = { - binary: new Int32Array(4), - }; - - expect(marshallItem(binaryDoc, document)).toEqual({ - binary: {B: new Uint8Array(16)}, - }); - }); - - it('should convert UTF-8 strings to Uint8Arrays', () => { - const binaryDoc: Schema = { - binary: {type: 'Binary'}, - }; - const document = { - binary: '☃💩', - }; - - expect(marshallItem(binaryDoc, document)).toEqual({ - binary: {B: new Uint8Array([226, 152, 131, 240, 159, 146, 169])}, - }); - }); - - it('should convert empty binary values to NULL', () => { - const binaryDoc: Schema = { - binary: {type: 'Binary'}, - }; - const document = { - binary: new Int32Array(0), - }; - - expect(marshallItem(binaryDoc, document)).toEqual({ - binary: {NULL: true}, - }); - }); - }); - - describe('binary set fields', () => { - const schema: Schema = { - binSet: { type: 'Set', memberType: 'Binary'}, - }; - - it('should serialize BinarySet fields', () => { - expect(marshallItem( - schema, - { - binSet: new BinarySet([ - new Uint8Array(1), - new Uint8Array(2), - new Uint8Array(3), - ]) - } - )).toEqual({ - binSet: { - BS: [ - new Uint8Array(1), - new Uint8Array(2), - new Uint8Array(3), - ] - }, - }); - }); - - it('should deduplicate values included in the input', () => { - expect(marshallItem( - schema, - { - binSet: [ - Uint8Array.from([240, 159, 144, 142, 240, 159, 145, 177, 226, 157, 164]).buffer, - Uint8Array.from([240, 159, 144, 142, 240, 159, 145, 177, 226, 157, 164]), - '🐎👱❤', - ] - } - )).toEqual({ - binSet:{ - BS: [ - Uint8Array.from([240, 159, 144, 142, 240, 159, 145, 177, 226, 157, 164]), - ] - }, - }); - }); - - it('should remove empty values from sets', () => { - expect(marshallItem( - schema, - { - binSet: new BinarySet([ - new ArrayBuffer(0), - new ArrayBuffer(1), - new ArrayBuffer(2), - new ArrayBuffer(3), - new ArrayBuffer(0), - ]) - } - )).toEqual({ - binSet: { - BS: [ - new Uint8Array(1), - new Uint8Array(2), - new Uint8Array(3), - ] - }, - }); - }); - - it('should render empty sets as NullAttributeValues', () => { - expect(marshallItem(schema, {binSet: [new ArrayBuffer(0)]})) - .toEqual({ - binSet: {NULL: true}, - }); - }); - }); - - describe('boolean fields', () => { - it('should marshall boolean fields', () => { - const schema: Schema = { - boolean: {type: 'Boolean'}, - }; - - expect(marshallItem(schema, {boolean: false})).toEqual({ - boolean: {BOOL: false}, - }); - }); - }); - - describe('custom fields', () => { - it('should use the marshaller function embedded in the type', () => { - const marshaller = jest.fn(() => ({S: 'stubbed'})); - const schema = { - custom: { - type: 'Custom', - marshall: marshaller, - unmarshall: jest.fn() - } as CustomType, - }; - const document = {custom: 'a value'}; - expect(marshallItem(schema, document)) - .toEqual({custom: {S: 'stubbed'}}); - - expect(marshaller.mock.calls.length).toBe(1); - expect((marshaller.mock.calls[0] as any)[0]).toBe(document.custom); - }); - }); - - describe('collection fields', () => { - it('should marshall iterables of untyped data', () => { - const schema: Schema = {mixedList: {type: 'Collection'}}; - const input = { - mixedList: [ - 'string', - 123, - undefined, - new ArrayBuffer(12), - {foo: 'bar'}, - ['one string', 234, new ArrayBuffer(5)], - ] - }; - - 
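            // Note in the expectation below that the undefined member is dropped rather than marshalled.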
expect(marshallItem(schema, input)).toEqual({ - mixedList: { - L: [ - {S: 'string'}, - {N: '123'}, - {B: new ArrayBuffer(12)}, - {M: {foo: {S: 'bar'}}}, - {L: [ - {S: 'one string'}, - {N: '234'}, - {B: new ArrayBuffer(5)}, - ]}, - ], - }, - }); - }); - }); - - describe('date fields', () => { - const iso8601 = '2000-01-01T00:00:00Z'; - const epoch = 946684800; - - it('should marshall date objects', () => { - const aDate = new Date(iso8601); - const schema: Schema = {aDate: {type: 'Date'}}; - - expect(marshallItem(schema, {aDate})).toEqual({ - aDate: {N: epoch.toString(10)}, - }); - }); - - it('should marshall date strings', () => { - const schema: Schema = {aDate: {type: 'Date'}}; - - expect(marshallItem(schema, {aDate: iso8601})).toEqual({ - aDate: {N: epoch.toString(10)}, - }); - }); - - it('should marshall numbers as epoch timestamps', () => { - const schema: Schema = {aDate: {type: 'Date'}}; - - expect(marshallItem(schema, {aDate: epoch})).toEqual({ - aDate: {N: epoch.toString(10)}, - }); - }); - - it('should throw if an unexpected input is received', () => { - const schema: Schema = {aDate: {type: 'Date'}}; - - expect(() => marshallItem(schema, {aDate: new ArrayBuffer(10)})) - .toThrow(objectContaining({invalidValue: new ArrayBuffer(10)})); - }); - }); - - describe('document fields', () => { - it('should marshall documents as String => AttributeValue maps', () => { - const schema: Schema = { - nested: { - type: 'Document', - members: { - nested: { - type: 'Document', - members: { - scalar: {type: 'String'}, - }, - }, - }, - }, - }; - const input = {nested: {nested: {scalar: 'value'}}}; - - expect(marshallItem(schema, input)).toEqual({ - nested: { - M: { - nested: { - M: { - scalar: { - S: 'value', - }, - }, - }, - }, - }, - }); - }); - }); - - describe('hash fields', () => { - it('should marshall objects of untyped data', () => { - const schema: Schema = {mixedObject: {type: 'Hash'}}; - const input = { - mixedObject: { - foo: 'string', - bar: 123, - baz: new ArrayBuffer(12), - fizz: {foo: 'bar'}, - buzz: ['one string', 234, new Uint8Array(5)], - snap: new Set(['foo', 'foo', 'bar', 'bar', 'baz']), - crackle: new Set([0, 1, 2, 3, 0, 1, 2, 3]), - pop: new BinarySet([ - new Uint8Array(1), - new Uint8Array(2), - new Uint8Array(3), - ]) - } - }; - - expect(marshallItem(schema, input)).toEqual({ - mixedObject: { - M: { - foo: {S: 'string'}, - bar: {N: '123'}, - baz: {B: new ArrayBuffer(12)}, - fizz: {M: {foo: {S: 'bar'}}}, - buzz: { - L: [ - {S: 'one string'}, - {N: '234'}, - {B: new Uint8Array(5)}, - ] - }, - snap: {SS: ['foo', 'bar', 'baz']}, - crackle: {NS: ['0', '1', '2', '3']}, - pop: {BS: [ - new Uint8Array(1), - new Uint8Array(2), - new Uint8Array(3), - ]} - }, - }, - }); - }); - }); - - describe('list fields', () => { - const schema: Schema = { - list: { - type: 'List', - memberType: {type: 'String'}, - }, - }; - - it('should serialize an array of like items', () => { - expect(marshallItem(schema, {list: ['a', 'b', 'c']})).toEqual({ - list: { - L: [ - {S: 'a'}, - {S: 'b'}, - {S: 'c'}, - ], - }, - }); - }); - - it('should serialize an iterable of like items', () => { - const stringIterable = function *() { - yield 'a'; - yield 'b'; - yield 'c'; - }; - - expect(marshallItem(schema, {list: stringIterable()})).toEqual({ - list: { - L: [ - {S: 'a'}, - {S: 'b'}, - {S: 'c'}, - ], - }, - }); - }); - - it('should nullify empty members', () => { - expect(marshallItem(schema, {list: ['a', '', 'c']})).toEqual({ - list: { - L: [ - {S: 'a'}, - {NULL: true}, - {S: 'c'}, - ], - }, - }); - }); - }); - 
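Between the list-typed cases above and the map-typed cases below, a minimal usage sketch may help tie these assertions back to the public API. It assumes the package is consumed as @aws/dynamodb-data-marshaller (mirroring the sibling @aws/* imports in these sources); userSchema and the sample item are hypothetical:

    import { marshallItem, Schema } from '@aws/dynamodb-data-marshaller';

    // Hypothetical schema: property names on the left, storage metadata on the right.
    const userSchema: Schema = {
        id: { type: 'String', keyType: 'HASH' },
        createdAt: { type: 'Date', attributeName: 'created_at' },
        tags: { type: 'List', memberType: { type: 'String' } },
    };

    // marshallItem walks the schema, renames properties to their attributeName,
    // and converts each value into a DynamoDB AttributeValue.
    const item = marshallItem(userSchema, {
        id: 'user-123',
        createdAt: new Date('2000-01-01T00:00:00Z'),
        tags: ['a', 'b'],
    });
    // => { id: {S: 'user-123'}, created_at: {N: '946684800'}, tags: {L: [{S: 'a'}, {S: 'b'}]} }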
- describe('map fields', () => { - const schema: Schema = { - map: { - type: 'Map', - memberType: {type: 'String'}, - }, - }; - - it('should serialize an object with like values', () => { - expect(marshallItem(schema, {map: {foo: 'bar', fizz: 'buzz'}})) - .toEqual({ - map: { - M: { - foo: {S: 'bar'}, - fizz: {S: 'buzz'}, - }, - }, - }); - }); - - it('should serialize a [string, ValueType] iterable', () => { - const iterable = new Map([ - ['foo', 'bar'], - ['fizz', 'buzz'], - ]); - - expect(marshallItem(schema, {map: iterable})).toEqual({ - map: { - M: { - foo: {S: 'bar'}, - fizz: {S: 'buzz'}, - }, - }, - }); - }); - - it( - 'should throw if a value that cannot be converted to a map is received', - () => { - expect(() => marshallItem(schema, {map: 234})).toThrow(); - } - ); - }); - - describe('null fields', () => { - it('should always return a null AttributeValue', () => { - for (let value of ['string', 234, false, [], {}, new Int8Array(0)]) { - expect(marshallItem({value: {type: 'Null'}}, {value})) - .toEqual({value: {NULL: true}}); - } - }); - }); - - describe('number fields', () => { - it('should marshall number fields', () => { - expect(marshallItem({num: {type: 'Number'}}, {num: 123})) - .toEqual({num: {N: '123'}}); - }); - }); - - describe('number set fields', () => { - const schema: Schema = { - numSet: { type: 'Set', memberType: 'Number'}, - }; - - it('should serialize NumberSet fields', () => { - expect(marshallItem(schema, {numSet: new Set([1, 2, 3])})) - .toEqual({ - numSet: {NS: ['1', '2', '3']}, - }); - }); - - it('should deduplicate values included in the input', () => { - expect(marshallItem(schema, {numSet: [1, 2, 3, 1]})) - .toEqual({ - numSet: {NS: ['1', '2', '3']}, - }); - }); - - it('should render empty sets as NullAttributeValues', () => { - expect(marshallItem(schema, {numSet: []})) - .toEqual({ - numSet: {NULL: true}, - }); - }); - }); - - describe('set fields', () => { - const schema: Schema = { - fooSet: { type: 'Set', memberType: 'foo'} as any, - }; - - it('should throw an error if the memberType is not recognized', () => { - expect(() => marshallItem(schema, {fooSet: [1, 2, 3, 1]})) - .toThrowError(/Unrecognized set member type/); - }) - }); - - describe('string fields', () => { - it('should marshall string fields', () => { - expect(marshallItem({str: {type: 'String'}}, {str: 'string'})) - .toEqual({str: {S: 'string'}}); - }); - - it('should marshall stringable objects', () => { - expect(marshallItem({str: {type: 'String'}}, {str: {}})) - .toEqual({str: {S: '[object Object]'}}); - }); - - it('should render empty strings as a NullAttributeValue', () => { - expect(marshallItem({str: {type: 'String'}}, {str: ''})) - .toEqual({str: {NULL: true}}); - }); - }); - - describe('string set fields', () => { - const schema: Schema = { - strSet: { type: 'Set', memberType: 'String'}, - }; - - it('should serialize StringSet fields', () => { - expect(marshallItem(schema, {strSet: new Set(['a', 'b', 'c'])})) - .toEqual({ - strSet: {SS: ['a', 'b', 'c']}, - }); - }); - - it('should deduplicate values included in the input', () => { - expect(marshallItem(schema, {strSet: ['a', 'b', 'c', 'a']})) - .toEqual({ - strSet: {SS: ['a', 'b', 'c']}, - }); - }); - - it('should remove empty values from sets', () => { - expect(marshallItem(schema, {strSet: ['', 'a', 'b', 'c', '']})) - .toEqual({ - strSet: {SS: ['a', 'b', 'c']}, - }); - }); - - it('should render empty sets as NullAttributeValues', () => { - expect(marshallItem(schema, {strSet: ['', '']})) - .toEqual({ - strSet: {NULL: true}, - }); 
- }); - }); - - describe('tuple fields', () => { - const schema: Schema = { - jobResult: { - type: 'Tuple', - members: [ - {type: 'Boolean'}, - {type: 'Number'}, - ], - } - }; - - it('should serialize Tuples', () => { - expect(marshallItem(schema, {jobResult: [true, 123]})).toEqual({ - jobResult: { - L: [ - {BOOL: true}, - {N: '123'}, - ], - }, - }); - }); - }); -}); diff --git a/packages/dynamodb-data-marshaller/src/marshallItem.ts b/packages/dynamodb-data-marshaller/src/marshallItem.ts deleted file mode 100644 index 762f72f3..00000000 --- a/packages/dynamodb-data-marshaller/src/marshallItem.ts +++ /dev/null @@ -1,322 +0,0 @@ -import { Schema } from './Schema'; -import { SchemaType } from './SchemaType'; -import { InvalidValueError } from './InvalidValueError'; -import { InvalidSchemaError } from './InvalidSchemaError'; -import { AttributeMap, AttributeValue } from 'aws-sdk/clients/dynamodb'; -import { - BinarySet, - BinaryValue, - Marshaller, - NumberValueSet, -} from '@aws/dynamodb-auto-marshaller'; -const bytes = require('utf8-bytes'); - -/** - * Converts a JavaScript object into a DynamoDB Item. - * - * @param schema Metadata explaining how the provided input is to be marshalled - * @param input JavaScript object to convert - */ -export function marshallItem( - schema: Schema, - input: {[key: string]: any} -): AttributeMap { - const marshalled: AttributeMap = {}; - - for (const key of Object.keys(schema)) { - const value = input[key]; - const {attributeName = key} = schema[key]; - const marshalledValue = marshallValue(schema[key], value); - if (marshalledValue) { - marshalled[attributeName] = marshalledValue; - } - } - - return marshalled; -} - -/** - * Converts a value into a DynamoDB AttributeValue. - * - * @param schemaType Metadata outlining how the value is to be understood and - * converted - * @param input Value to convert - */ -export function marshallValue( - schemaType: SchemaType, - input: any -): AttributeValue|undefined { - if (input === undefined) { - const {defaultProvider} = schemaType; - if (typeof defaultProvider === 'function') { - input = defaultProvider(); - } else { - return undefined; - } - } - - if (schemaType.type === 'Any') { - const { - onEmpty = 'nullify', - onInvalid = 'omit', - unwrapNumbers = false, - } = schemaType; - const marshaller = new Marshaller({onEmpty, onInvalid, unwrapNumbers}); - return marshaller.marshallValue(input); - } - - if (schemaType.type === 'Binary') { - if (!input || input.length === 0 || input.byteLength === 0) { - return {NULL: true}; - } - - return {B: marshallBinary(input)}; - } - - if (schemaType.type === 'Boolean') { - return {BOOL: Boolean(input)}; - } - - if (schemaType.type === 'Custom') { - return schemaType.marshall(input); - } - - if (schemaType.type === 'Collection') { - const { - onEmpty = 'nullify', - onInvalid = 'omit', - unwrapNumbers = false, - } = schemaType; - const marshaller = new Marshaller({onEmpty, onInvalid, unwrapNumbers}); - - const collected: Array = []; - for (const element of input) { - const marshalled = marshaller.marshallValue(element); - if (marshalled) { - collected.push(marshalled); - } - } - - return {L: collected}; - } - - if (schemaType.type === 'Date') { - let date: Date; - if (typeof input === 'string') { - date = new Date(input); - } else if (typeof input === 'number') { - date = new Date(input * 1000); - } else if (isDate(input)) { - date = input; - } else { - throw new InvalidValueError( - input, - 'Unable to convert value to date' - ); - } - - return {N: 
marshallNumber(Math.floor(date.valueOf() / 1000))}; - } - - if (schemaType.type === 'Document') { - return {M: marshallItem(schemaType.members, input)}; - } - - if (schemaType.type === 'Hash') { - const { - onEmpty = 'nullify', - onInvalid = 'omit', - unwrapNumbers = false, - } = schemaType; - const marshaller = new Marshaller({onEmpty, onInvalid, unwrapNumbers}); - - return {M: marshaller.marshallItem(input)}; - } - - if (schemaType.type === 'List') { - const elements = []; - for (const member of input) { - const marshalled = marshallValue(schemaType.memberType, member); - if (marshalled) { - elements.push(marshalled); - } - } - return {L: elements}; - } - - if (schemaType.type === 'Map') { - const marshalled: AttributeMap = {}; - if (typeof input[Symbol.iterator] === 'function') { - for (let [key, value] of input) { - const marshalledValue = marshallValue( - schemaType.memberType, - value - ); - if (marshalledValue) { - marshalled[key] = marshalledValue; - } - } - } else if (typeof input === 'object') { - for (const key of Object.keys(input)) { - const marshalledValue = marshallValue( - schemaType.memberType, - input[key] - ); - if (marshalledValue) { - marshalled[key] = marshalledValue; - } - } - } else { - throw new InvalidValueError( - input, - 'Unable to convert value to map' - ); - } - - return {M: marshalled}; - } - - if (schemaType.type === 'Null') { - return {NULL: true}; - } - - if (schemaType.type === 'Number') { - return {N: marshallNumber(input)}; - } - - if (schemaType.type === 'Set') { - if (schemaType.memberType === 'Binary') { - if (!(input instanceof BinarySet)) { - const set = new BinarySet(); - for (const item of input) { - set.add(marshallBinary(item)); - } - input = set; - } - - return marshallSet( - input, - marshallBinary, - (bin: BinaryValue) => bin.byteLength === 0, - 'BS' - ); - } - - if (schemaType.memberType === 'Number') { - if (!(input instanceof Set)) { - input = new NumberValueSet(input) - } - - return marshallSet( - input, - marshallNumber, - () => false, - 'NS' - ); - } - - if (schemaType.memberType === 'String') { - if (!(input instanceof Set)) { - const original = input; - input = new Set(); - for (const el of original) { - input.add(el); - } - } - - return marshallSet( - input, - marshallString, - (string: string) => string.length === 0, - 'SS' - ); - } - - throw new InvalidSchemaError( - schemaType, - `Unrecognized set member type: ${schemaType.memberType}` - ); - } - - if (schemaType.type === 'String') { - const string = marshallString(input); - if (string.length === 0) { - return {NULL: true}; - } - - return {S: string}; - } - - if (schemaType.type === 'Tuple') { - return { - L: schemaType.members - .map((type: SchemaType, index: number) => marshallValue(type, input[index])) - .filter((val): val is AttributeValue => val !== undefined) - } - } - - throw new InvalidSchemaError(schemaType, 'Unrecognized schema node'); -} - -function marshallBinary( - input: string|ArrayBuffer|ArrayBufferView -): Uint8Array { - if (ArrayBuffer.isView(input)) { - return new Uint8Array( - input.buffer, - input.byteOffset, - input.byteLength - ); - } - - if (isArrayBuffer(input)) { - return new Uint8Array(input); - } - - return Uint8Array.from(bytes(input)); -} - -function marshallNumber(input: number): string { - return input.toString(10); -} - -function marshallString(input: {toString(): string}): string { - return input.toString(); -} - -function marshallSet( - value: Iterable, - marshaller: (element: InputType) => MarshalledElementType, - isEmpty: (member: 
MarshalledElementType) => boolean, - setTag: 'BS'|'NS'|'SS' -): AttributeValue { - const collected: Array = []; - for (const member of value) { - const marshalled = marshaller(member); - if (isEmpty(marshalled)) { - // DynamoDB sets cannot contain empty values - continue; - } - - collected.push(marshalled); - } - - if (collected.length === 0) { - return {NULL: true}; - } - - return {[setTag]: collected}; -} - -function isArrayBuffer(arg: any): arg is ArrayBuffer { - return typeof ArrayBuffer === 'function' - && ( - arg instanceof ArrayBuffer || - Object.prototype.toString.call(arg) === '[object ArrayBuffer]' - ); -} - -function isDate(arg: any): arg is Date { - return arg instanceof Date - || Object.prototype.toString.call(arg) === '[object Date]'; -} diff --git a/packages/dynamodb-data-marshaller/src/marshallKey.spec.ts b/packages/dynamodb-data-marshaller/src/marshallKey.spec.ts deleted file mode 100644 index 3169c859..00000000 --- a/packages/dynamodb-data-marshaller/src/marshallKey.spec.ts +++ /dev/null @@ -1,52 +0,0 @@ -import {marshallKey} from './marshallKey'; -import {Schema} from './Schema'; - -describe('marshallKey', () => { - const schema: Schema = { - fizz: { - type: 'String', - keyType: 'HASH', - attributeName: 'foo', - }, - buzz: { - type: 'Date', - keyType: 'RANGE', - indexKeyConfigurations: {bar: 'HASH'}, - attributeName: 'bar', - }, - pop: { - type: 'Number', - indexKeyConfigurations: {foo: 'HASH'} - }, - notAKey: { - type: 'Tuple', - members: [ - {type: 'Boolean'}, - {type: 'String'}, - ] - }, - }; - - const input = { - fizz: 'baz', - buzz: new Date(1000), - pop: 10, - notAKey: [true, 'quux'] - }; - - it('should only marshall key fields', () => { - expect(marshallKey(schema, input)) - .toEqual({ - foo: {S: 'baz'}, - bar: {N: '1'} - }); - }); - - it( - 'should marshall key fields for the correct index if an index name is supplied', - () => { - expect(marshallKey(schema, input, 'foo')).toEqual({pop: {N: '10'}}); - expect(marshallKey(schema, input, 'bar')).toEqual({bar: {N: '1'}}); - } - ); -}); diff --git a/packages/dynamodb-data-marshaller/src/marshallKey.ts b/packages/dynamodb-data-marshaller/src/marshallKey.ts deleted file mode 100644 index 31a32ab3..00000000 --- a/packages/dynamodb-data-marshaller/src/marshallKey.ts +++ /dev/null @@ -1,25 +0,0 @@ -import {isKey} from './isKey'; -import {marshallValue} from './marshallItem'; -import {Schema} from './Schema'; -import {AttributeMap} from 'aws-sdk/clients/dynamodb'; - -export function marshallKey( - schema: Schema, - input: {[key: string]: any}, - indexName?: string -): AttributeMap { - const marshalled: AttributeMap = {}; - - for (const propertyKey of Object.keys(schema)) { - const fieldSchema = schema[propertyKey]; - if (isKey(fieldSchema, indexName)) { - const {attributeName = propertyKey} = fieldSchema; - const value = marshallValue(fieldSchema, input[propertyKey]); - if (value) { - marshalled[attributeName] = value; - } - } - } - - return marshalled; -} diff --git a/packages/dynamodb-data-marshaller/src/toSchemaName.spec.ts b/packages/dynamodb-data-marshaller/src/toSchemaName.spec.ts deleted file mode 100644 index bdfc7083..00000000 --- a/packages/dynamodb-data-marshaller/src/toSchemaName.spec.ts +++ /dev/null @@ -1,93 +0,0 @@ -import {Schema} from './Schema'; -import {toSchemaName} from './toSchemaName'; -import {AttributePath} from '@aws/dynamodb-expressions'; - -const testCases = new Map(); -const schema: Schema = { - unchanged: {type: 'String'}, - property: { - type: 'String', - attributeName: 'attributeName', - }, - 
foo: { - type: 'Document', - attributeName: 'topLevelFoo', - members: { - foo: { - type: 'String', - attributeName: 'fizz', - }, - bar: { - type: 'String', - attributeName: 'buzz', - }, - baz: { - type: 'String', - attributeName: 'pop', - } - } - }, - bar: { - type: 'List', - attributeName: 'topLevelBar', - memberType: { - type: 'Document', - members: { - tom: { - type: 'String', - attributeName: 'jerry' - }, - bugs: { - type: 'String', - attributeName: 'daffy' - }, - itchy: { - type: 'List', - attributeName: 'scratchy', - memberType: { - type: 'List', - memberType: { - type: 'Document', - members: { - nameToReplace: { - type: 'String', - attributeName: 'replacementName' - } - } - } - } - } - } - } - }, - untypedHash: { - type: 'Hash', - attributeName: 'terminalType' - } -}; - -testCases.set('unchanged', 'unchanged'); -testCases.set('property', 'attributeName'); -testCases.set('foo.foo', 'topLevelFoo.fizz'); -testCases.set('foo.bar', 'topLevelFoo.buzz'); -testCases.set('foo.baz', 'topLevelFoo.pop'); -testCases.set('bar[11].tom', 'topLevelBar[11].jerry'); -testCases.set('bar[21].bugs', 'topLevelBar[21].daffy'); -testCases.set( - 'bar[1].itchy[23][2].nameToReplace', - 'topLevelBar[1].scratchy[23][2].replacementName' -); -testCases.set('untypedHash.foo.bar.baz', 'terminalType.foo.bar.baz'); -testCases.set( - 'unknownProperty.access[1][2].baz', - 'unknownProperty.access[1][2].baz' -); - -describe('toSchemaName', () => { - for (const [input, output] of testCases) { - it(`should convert a path of ${input} to ${output}`, () => { - expect(toSchemaName(input, schema).elements) - .toEqual(new AttributePath(output).elements); - }); - } -}); diff --git a/packages/dynamodb-data-marshaller/src/toSchemaName.ts b/packages/dynamodb-data-marshaller/src/toSchemaName.ts deleted file mode 100644 index fd337139..00000000 --- a/packages/dynamodb-data-marshaller/src/toSchemaName.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { AttributePath } from '@aws/dynamodb-expressions'; -import { Schema } from './Schema'; -import { SchemaType } from './SchemaType'; - -export function toSchemaName( - path: AttributePath|string, - schema: Schema -): AttributePath { - if (typeof path === 'string') { - path = new AttributePath(path); - } - const elements = path.elements.map(el => ({...el})); - - let cursor: SchemaType = { - type: 'Document', - members: schema - }; - for (const element of elements) { - if ( - element.type === 'AttributeName' && - cursor && - cursor.type === 'Document' - ) { - const {name} = element; - element.name = getSchemaName(name, cursor.members); - cursor = cursor.members[name]; - } else if ( - element.type === 'ListIndex' && - cursor && - cursor.type === 'List' - ) { - cursor = (cursor as any).memberType; - } else { - break; - } - } - - return new AttributePath(elements); -} - -export function getSchemaName(propertyName: string, schema: Schema): string { - const fieldSchema = schema[propertyName]; - if (fieldSchema) { - const {attributeName = propertyName} = fieldSchema; - return attributeName; - } - - return propertyName; -} diff --git a/packages/dynamodb-data-marshaller/src/unmarshallItem.spec.ts b/packages/dynamodb-data-marshaller/src/unmarshallItem.spec.ts deleted file mode 100644 index 9518619b..00000000 --- a/packages/dynamodb-data-marshaller/src/unmarshallItem.spec.ts +++ /dev/null @@ -1,563 +0,0 @@ -import {unmarshallItem} from "./unmarshallItem"; -import {Schema} from "./Schema"; -import { - BinarySet, - NumberValue, - NumberValueSet, -} from "@aws/dynamodb-auto-marshaller"; - 
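The spec below covers unmarshallItem, the inverse of marshallItem. As a minimal sketch of its intended use, reusing the hypothetical userSchema and the assumed @aws/dynamodb-data-marshaller entry point from the earlier marshallItem sketch:

    import { unmarshallItem, Schema } from '@aws/dynamodb-data-marshaller';

    const userSchema: Schema = {
        id: { type: 'String', keyType: 'HASH' },
        createdAt: { type: 'Date', attributeName: 'created_at' },
        tags: { type: 'List', memberType: { type: 'String' } },
    };

    // Attribute names in the raw item must match each field's attributeName
    // (or the property name when none is declared); they are mapped back to
    // schema property names, and AttributeValues back to JavaScript values.
    const user = unmarshallItem(userSchema, {
        id: { S: 'user-123' },
        created_at: { N: '946684800' },
        tags: { L: [{ S: 'a' }, { S: 'b' }] },
    });
    // => { id: 'user-123', createdAt: new Date('2000-01-01T00:00:00Z'), tags: ['a', 'b'] }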
-describe('unmarshallItem', () => { - it('should unmarshall fields from their attributeName if provided', () => { - const attributeName = 'binVal'; - const schema: Schema = { - binary: {type: 'Binary', attributeName}, - }; - - expect(unmarshallItem( - schema, - {[attributeName]: {B: new Uint8Array(15)}} - )).toEqual({binary: new Uint8Array(15)}); - }); - - it('should ignore fields not mentioned in the schema', () => { - const schema: Schema = { - binary: {type: 'Binary'}, - }; - - expect(unmarshallItem(schema, {str: {S: 'a string'}})).toEqual({}); - }); - - it('should ignore fields whose type differs from that in the schema', () => { - const schema: Schema = { - binary: {type: 'Binary'}, - }; - - expect(unmarshallItem(schema, {binary: {S: 'a string'}})).toEqual({}); - }); - - it('should throw if the schema type tag is not recognized', () => { - const schema: Schema = { - binary: {type: 'Foo'} as any, - }; - - expect(() => unmarshallItem(schema, {binary: {S: 'a string'}})) - .toThrow(); - }); - - describe('binary fields', () => { - const schema: Schema = { - binary: {type: 'Binary'}, - }; - - it('should unmarshall binary fields', () => { - expect(unmarshallItem(schema, {binary: {B: new Uint8Array(15)}})) - .toEqual({binary: new Uint8Array(15)}); - }); - - it('should convert null values to an empty binary value', () => { - expect(unmarshallItem(schema, {binary: {NULL: true}})) - .toEqual({binary: new Uint8Array(0)}); - }); - }); - - describe('"any" (untyped) fields', () => { - it('should marshall of untyped data', () => { - const schema: Schema = {mixedList: {type: 'Any'}}; - const input = { - mixedList: { - L: [ - {S: 'string'}, - {N: '123'}, - {B: new ArrayBuffer(12)}, - {M: {foo: {S: 'bar'}}}, - {L: [ - {S: 'one string'}, - {N: '234'}, - {B: new ArrayBuffer(5)}, - ]}, - ], - } - }; - - expect(unmarshallItem(schema, input)).toEqual({ - mixedList: [ - 'string', - new NumberValue(123), - new ArrayBuffer(12), - {foo: 'bar'}, - ['one string', new NumberValue(234), new ArrayBuffer(5)], - ], - }); - }); - - it('should marshall of untyped data, considering provided marshalling options', () => { - const schema: Schema = { - mixedList: { - type: 'Any', - unwrapNumbers: true - } - }; - const input = { - mixedList: { - L: [ - {S: 'string'}, - {N: '123'}, - {B: new ArrayBuffer(12)}, - {M: {foo: {S: 'bar'}}}, - {L: [ - {S: 'one string'}, - {N: '234'}, - {B: new ArrayBuffer(5)}, - ]}, - ], - } - }; - - expect(unmarshallItem(schema, input)).toEqual({ - mixedList: [ - 'string', - 123, - new ArrayBuffer(12), - {foo: 'bar'}, - ['one string', 234, new ArrayBuffer(5)], - ], - }); - }); - }); - - describe('binary set fields', () => { - const schema: Schema = { - binSet: {type: 'Set', memberType: 'Binary'}, - }; - - it('should unmarshall binary set fields', () => { - const attrMap = { - binSet: { - BS: [ - new Uint8Array(1), - new Uint8Array(2), - new Uint8Array(3), - ], - }, - }; - - expect(unmarshallItem(schema, attrMap)).toEqual({ - binSet: new BinarySet(attrMap.binSet.BS), - }); - }); - - it('should unmarshall null values as empty binary sets', () => { - expect(unmarshallItem(schema, {binSet: {NULL: true}})) - .toEqual({binSet: new BinarySet()}); - }); - - it('should unmarshall type mismatches as undefined', () => { - expect(unmarshallItem(schema, {binSet: {BOOL: true}})) - .toEqual({binSet: void 0}); - }); - }); - - describe('boolean fields', () => { - const schema: Schema = { - boolean: {type: 'Boolean'}, - }; - - it('should unmarshall boolean fields', () => { - expect(unmarshallItem(schema, {boolean: 
{BOOL: false}})) - .toEqual({boolean: false}); - }); - }); - - describe('collection fields', () => { - it('should unmarshall untyped collections', () => { - const schema: Schema = {mixedList: {type: 'Collection'}}; - const input = { - mixedList: { - L: [ - {S: 'string'}, - {N: '123'}, - {B: new Uint8Array(12)}, - {M: {foo: {S: 'bar'}}}, - {L: [ - {S: 'one string'}, - {N: '234'}, - {B: new Uint8Array(5)}, - ]}, - ], - }, - }; - - expect(unmarshallItem(schema, input)).toEqual({ - mixedList: [ - 'string', - new NumberValue('123'), - new Uint8Array(12), - {foo: 'bar'}, - ['one string', new NumberValue('234'), new Uint8Array(5)], - ] - }); - }); - }); - - describe('custom fields', () => { - it( - 'should unmarshall custom fields by invoking the unmarshaller defined in the schema', - () => { - const unmarshall = jest.fn(() => 'unmarshalled'); - const schema: Schema = { - custom: { - type: 'Custom', - marshall: jest.fn(), - unmarshall, - }, - }; - - expect(unmarshallItem(schema, {custom: {NULL: true}})) - .toEqual({custom: 'unmarshalled'}); - - expect(unmarshall.mock.calls.length).toBe(1); - expect(unmarshall.mock.calls[0]).toEqual([{NULL: true}]); - } - ); - }); - - describe('date fields', () => { - const schema: Schema = {aDate: {type: 'Date'}}; - const iso8601 = '2000-01-01T00:00:00Z'; - const epoch = 946684800; - - it('should unmarshall dates into Date objects', () => { - expect(unmarshallItem(schema, {aDate: {N: epoch.toString(10)}})) - .toEqual({aDate: new Date(iso8601)}); - }); - - it( - 'should leaves dates undefined if the value at the designated key is not a number', - () => { - expect(unmarshallItem(schema, {aDate: {S: epoch.toString(10)}})) - .toEqual({}); - } - ); - }); - - describe('document fields', () => { - it('should recursively unmarshall documents', () => { - const schema: Schema = { - nested: { - type: 'Document', - members: { - nested: { - type: 'Document', - members: { - scalar: {type: 'String'}, - }, - }, - }, - }, - }; - const input = { - nested: { - M: { - nested: { - M: { - scalar: { - S: 'value', - }, - }, - }, - }, - }, - }; - - expect(unmarshallItem(schema, input)) - .toEqual({nested: {nested: {scalar: 'value'}}}); - }); - - it( - 'should invoke the constructor defined in the schema for documents', - () => { - const ctor = class {}; - const schema: Schema = { - ctorDoc: { - type: 'Document', - members: {}, - valueConstructor: ctor, - } - }; - - const unmarshalled = unmarshallItem( - schema, - {ctorDoc: {M: {}}}, - ); - expect(unmarshalled.ctorDoc).toBeInstanceOf(ctor); - } - ); - - it('should return undefined for unexpected types', () => { - const schema: Schema = { - doc: { - type: 'Document', - members: {}, - } - }; - - expect(unmarshallItem(schema, {doc: {L: []}})).toEqual({}); - }); - }); - - describe('hash fields', () => { - it('should unmarshall untyped hashes', () => { - const schema: Schema = {mixedHash: {type: 'Hash'}}; - const input = { - mixedHash: { - M: { - foo: {S: 'string'}, - bar: {N: '123'}, - baz: {B: new Uint8Array(12)}, - quux: { - BS: [ - new Uint8Array(1), - new Uint8Array(2), - new Uint8Array(3), - ] - }, - fizz: {M: {foo: {S: 'bar'}}}, - buzz: { - L: [ - {S: 'one string'}, - {N: '234'}, - {B: new Uint8Array(5)}, - ], - }, - pop: { - NS: ['123', '234', '345'], - } - }, - }, - }; - - expect(unmarshallItem(schema, input)).toEqual({ - mixedHash: { - foo: 'string', - bar: new NumberValue('123'), - baz: new Uint8Array(12), - quux: new BinarySet([ - new Uint8Array(1), - new Uint8Array(2), - new Uint8Array(3), - ]), - fizz: {foo: 'bar'}, - buzz: [ - 
'one string', - new NumberValue('234'), - new Uint8Array(5) - ], - pop: new NumberValueSet([ - new NumberValue('123'), - new NumberValue('234'), - new NumberValue('345'), - ]), - } - }); - }); - }); - - describe('list fields', () => { - const schema: Schema = { - list: { - type: 'List', - memberType: {type: 'String'}, - }, - }; - - it('should unmarshall lists of like items', () => { - expect(unmarshallItem( - schema, - { - list: { - L: [ - {S: 'a'}, - {S: 'b'}, - {S: 'c'}, - ], - }, - } - )).toEqual({list: ['a', 'b', 'c']}); - }); - - it('should unmarshall non-lists as undefined', () => { - expect(unmarshallItem(schema, {list: {S: 's'}})).toEqual({}); - }); - }); - - describe('map fields', () => { - const schema: Schema = { - map: { - type: 'Map', - memberType: {type: 'String'}, - }, - }; - - it('should unmarshall maps of string keys to like items', () => { - expect(unmarshallItem( - schema, - { - map: { - M: { - foo: {S: 'bar'}, - fizz: {S: 'buzz'}, - }, - }, - } - )) - .toEqual({ - map: new Map([ - ['foo', 'bar'], - ['fizz', 'buzz'], - ]) - }); - }); - - it('should unmarshall unexpected types as undefined', () => { - expect(unmarshallItem(schema, {map: {S: 'foo'}})).toEqual({}); - }); - }); - - describe('null fields', () => { - const schema: Schema = { - 'null': {type: 'Null'}, - }; - - it('should unmarshall null fields', () => { - expect(unmarshallItem(schema, {'null': {NULL: true}})) - .toEqual({'null': null}); - }); - - it('should unmarshall unexpected types as undefined', () => { - expect(unmarshallItem(schema, {'null': {S: 'b'}})).toEqual({}); - }); - }); - - describe('number fields', () => { - const schema: Schema = { - number: {type: 'Number'}, - }; - - it('should unmarshall number fields', () => { - expect(unmarshallItem(schema, {number: {N: '123'}})) - .toEqual({number: 123}); - }); - - it('should unmarshall unexpected types as undefined', () => { - expect(unmarshallItem(schema, {number: {S: '123'}})).toEqual({}); - }); - }); - - describe('number set fields', () => { - const schema: Schema = { - numSet: { type: 'Set', memberType: 'Number'}, - }; - - it('should unmarshall number set fields', () => { - expect(unmarshallItem( - schema, - {numSet: {NS: ['1', '2', '3']}} - )).toEqual({numSet: new Set([1, 2, 3])}); - }); - - it('should unmarshall null values as empty sets', () => { - expect(unmarshallItem(schema, {numSet: {NULL: true}})) - .toEqual({numSet: new Set()}); - }); - - it('should unmarshall unexpected types as undefined', () => { - expect(unmarshallItem( - schema, - {numSet: {SS: ['1', '2', '3']}} - )).toEqual({}); - }); - }); - - describe('set fields', () => { - const schema: Schema = { - fooSet: { type: 'Set', memberType: 'foo'} as any, - }; - - it('should throw an error if the memberType is not recognized', () => { - expect(() => unmarshallItem(schema, {fooSet: {NS: ['1', '2', '3']}})) - .toThrowError(/Unrecognized set member type/); - }) - }); - - describe('string fields', () => { - const schema: Schema = { - string: {type: 'String'}, - }; - - it('should unmarshall string fields', () => { - expect(unmarshallItem(schema, {string: {S: 'string'}})) - .toEqual({string: 'string'}); - }); - - it('should unmarshall null values as empty strings', () => { - expect(unmarshallItem(schema, {string: {NULL: true}})) - .toEqual({string: ''}); - }); - }); - - describe('string set fields', () => { - const schema: Schema = { - strSet: { type: 'Set', memberType: 'String'}, - }; - - it('should unmarshall string set fields', () => { - expect(unmarshallItem( - schema, - {strSet: {SS: ['a', 
'b', 'c']}} - )).toEqual({strSet: new Set(['a', 'b', 'c'])}); - }); - - it('should unmarshall null values as empty sets', () => { - expect(unmarshallItem(schema, {strSet: {NULL: true}})) - .toEqual({strSet: new Set()}); - }); - - it('should unmarshall unexpected types as undefined', () => { - expect(unmarshallItem( - schema, - {strSet: {NS: ['a', 'b', 'c']}} - )).toEqual({}); - }); - }); - - describe('tuple fields', () => { - const schema: Schema = { - jobResult: { - type: 'Tuple', - members: [ - {type: 'Boolean'}, - {type: 'Number'}, - ], - } - }; - - it('should unmarshall tuples', () => { - expect(unmarshallItem( - schema, - { - jobResult: { - L: [ - {BOOL: true}, - {N: '123'}, - ], - }, - } - )).toEqual({jobResult: [true, 123]}); - }); - - it('should unmarshall unexpected types as undefined', () => { - expect(unmarshallItem(schema, {jobResult: {BOOL: true}})) - .toEqual({}); - }); - }); -}); diff --git a/packages/dynamodb-data-marshaller/src/unmarshallItem.ts b/packages/dynamodb-data-marshaller/src/unmarshallItem.ts deleted file mode 100644 index d89d0381..00000000 --- a/packages/dynamodb-data-marshaller/src/unmarshallItem.ts +++ /dev/null @@ -1,179 +0,0 @@ -import {Schema} from "./Schema"; -import { - ListType, - MapType, - SchemaType, - TupleType, - ZeroArgumentsConstructor, -} from "./SchemaType"; -import {InvalidSchemaError} from "./InvalidSchemaError"; -import {BinarySet, Marshaller} from "@aws/dynamodb-auto-marshaller"; -import { - AttributeMap, - AttributeValue, - AttributeValueList, - NumberSetAttributeValue, - StringSetAttributeValue, -} from "aws-sdk/clients/dynamodb"; - -/** - * Unmarshall a DynamoDB item into a JavaScript value. - * - * @param schema Metadata outlining the types to be expected - * throughout the input - * @param input The value to unmarshall - * @param valueConstructor A zero-argument constructor used to create the - * object onto which the input should be unmarshalled - */ -export function unmarshallItem( - schema: Schema, - input: AttributeMap, - valueConstructor?: ZeroArgumentsConstructor -): T { - const unmarshalled: T = valueConstructor - ? new valueConstructor() - : Object.create(null); - - for (const key of Object.keys(schema)) { - const {attributeName = key} = schema[key]; - if (attributeName in input) { - (unmarshalled as {[key: string]: any})[key] = unmarshallValue( - schema[key], - input[attributeName] - ); - } - } - - return unmarshalled; -} - -function unmarshallValue(schemaType: SchemaType, input: AttributeValue): any { - switch (schemaType.type) { - case 'Any': - case 'Collection': - case 'Hash': - const { - onEmpty = 'leave', - onInvalid = 'throw', - unwrapNumbers = false, - } = schemaType; - const autoMarshaller = new Marshaller({onEmpty, onInvalid, unwrapNumbers}); - return autoMarshaller.unmarshallValue(input); - case 'Binary': - if (input.NULL) { - return new Uint8Array(0); - } - - return input.B; - case 'Boolean': - return input.BOOL; - case 'Custom': - return schemaType.unmarshall(input); - case 'Date': - return input.N ? new Date(Number(input.N) * 1000) : undefined; - case 'Document': - return input.M - ? unmarshallItem( - schemaType.members, - input.M, - schemaType.valueConstructor - ) : undefined; - case 'List': - return input.L ? unmarshallList(schemaType, input.L) : undefined; - case 'Map': - return input.M ? unmarshallMap(schemaType, input.M) : undefined; - case 'Null': - return input.NULL ? null : undefined; - case 'Number': - return typeof input.N === 'string' ? 
Number(input.N) : undefined; - case 'Set': - switch (schemaType.memberType) { - case 'Binary': - if (input.NULL) { - return new BinarySet(); - } - - return typeof input.BS !== 'undefined' - ? new BinarySet(input.BS as Array) - : undefined; - case 'Number': - if (input.NULL) { - return new Set(); - } - - return input.NS ? unmarshallNumberSet(input.NS) : undefined; - case 'String': - if (input.NULL) { - return new Set(); - } - - return input.SS ? unmarshallStringSet(input.SS) : undefined; - default: - throw new InvalidSchemaError( - schemaType, - `Unrecognized set member type: ${schemaType.memberType}` - ); - } - case 'String': - return input.NULL ? '' : input.S; - case 'Tuple': - return input.L ? unmarshallTuple(schemaType, input.L) : undefined; - } - - throw new InvalidSchemaError(schemaType, 'Unrecognized schema node'); -} - -function unmarshallList( - schemaType: ListType, - input: AttributeValueList -): Array { - const list: Array = []; - for (const element of input) { - list.push(unmarshallValue(schemaType.memberType, element)); - } - - return list; -} - -function unmarshallMap( - schemaType: MapType, - input: AttributeMap -): Map { - const map = new Map(); - for (const key of Object.keys(input)) { - map.set(key, unmarshallValue(schemaType.memberType, input[key])); - } - - return map; -} - -function unmarshallNumberSet(input: NumberSetAttributeValue): Set { - const set = new Set(); - for (const number of input) { - set.add(Number(number)); - } - - return set; -} - -function unmarshallStringSet(input: StringSetAttributeValue): Set { - const set = new Set(); - for (const string of input) { - set.add(string); - } - - return set; -} - -function unmarshallTuple( - schemaType: TupleType, - input: AttributeValueList -): Array { - const {members} = schemaType; - const tuple: Array = []; - for (let i = 0; i < members.length; i++) { - tuple.push(unmarshallValue(members[i], input[i])); - } - - return tuple; -} diff --git a/packages/dynamodb-data-marshaller/tsconfig.json b/packages/dynamodb-data-marshaller/tsconfig.json deleted file mode 100644 index 6022dfc4..00000000 --- a/packages/dynamodb-data-marshaller/tsconfig.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "compilerOptions": { - "target": "es5", - "lib": [ - "es5", - "es2015.iterable", - "es2015.promise", - "es2015.collection", - "es2015.symbol.wellknown" - ], - "downlevelIteration": true, - "importHelpers": true, - "module": "commonjs", - "strict": true, - "noUnusedLocals": true, - "declaration": true, - "sourceMap": true, - "rootDir": "./src", - "outDir": "./build" - }, - "typedocOptions": { - "mode": "file", - "out": "../../docs/packages/dynamodb-data-marshaller", - "excludeNotExported": true, - "excludePrivate": true, - "hideGenerator": true - } -} diff --git a/packages/dynamodb-data-marshaller/tsconfig.test.json b/packages/dynamodb-data-marshaller/tsconfig.test.json deleted file mode 100644 index 48fd804f..00000000 --- a/packages/dynamodb-data-marshaller/tsconfig.test.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "sourceMap": false, - "inlineSourceMap": true, - "inlineSources": true, - "sourceRoot": "./src", - "outDir": "./build" - } -} diff --git a/packages/dynamodb-expressions/.npmignore b/packages/dynamodb-expressions/.npmignore deleted file mode 100644 index 1d116ecc..00000000 --- a/packages/dynamodb-expressions/.npmignore +++ /dev/null @@ -1,10 +0,0 @@ -/src -/node_modules -/coverage - -*.spec.d.ts -*.spec.js -*.spec.js.map - -tsconfig.json -tsconfig.test.json diff --git 
a/packages/dynamodb-expressions/LICENSE b/packages/dynamodb-expressions/LICENSE deleted file mode 100644 index da05f5c9..00000000 --- a/packages/dynamodb-expressions/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. 
Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2017 Amazon.com, Inc. or its affiliates - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/packages/dynamodb-expressions/README.md b/packages/dynamodb-expressions/README.md deleted file mode 100644 index 8eac0ec6..00000000 --- a/packages/dynamodb-expressions/README.md +++ /dev/null @@ -1,501 +0,0 @@ -# Amazon DynamoDB Expressions - -[![Apache 2 License](https://img.shields.io/github/license/awslabs/dynamodb-data-mapper-js.svg?style=flat)](http://aws.amazon.com/apache-2-0/) - -This library provides a number of abstractions designed to make dealing with -Amazon DynamoDB expressions easier and more natural for JavaScript developers. - -## Attribute paths - -The `AttributePath` class provides a simple way to write [DynamoDB document -paths](http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.Attributes.html#Expressions.Attributes.NestedElements.DocumentPathExamples). -If the constructor receives a string, it will parse the path by scanning for -dots (`.`), which designate map property dereferencing and left brackets (`[`), -which designate list attribute dereferencing. For example, -`'ProductReviews.FiveStar[0].reviewer.username'` would be understood as -referring to the `username` property of the `reviewer` property of the first -element of the list stored at the `FiveStar` property of the top-level -`ProductReviews` document attribute. - -If a property name contains a left bracket or dot, it may be escaped with a -backslash `\`. For example, `Product\.Reviews` would be interpreted as a single -top-level document attribute rather than as a map property access. - -## Attribute values - -This library will marshall values encountered using runtime type detection. If -you have a value that is already in the format expected by DynamoDB, you may -pass it to the `AttributeValue` constructor to direct other expression helpers -not to marshall the value further. - -## Condition expressions - -DynamoDB condition expressions may come in the form of a function call or as the -combination of values and infix operators. This library therefore defines a -`ConditionExpression` as the union of [`FunctionExpression`](#function-expressions) -and a tagged union of the expression operator types. Expressions may be compound -or simple. - -### Compound expressions - -These expressions envelope one or more simple expressions and are true or false -based on the value of the subexpressions they contain. The recognized compound -expressions are: - -#### `And` expressions - -Asserts that all of the subexpressions' conditions are satisfied. - -```typescript -import {ConditionExpression} from '@aws/dynamodb-expressions'; - -const andExpression: ConditionExpression = { - type: 'And', - conditions: [ - // one or more subexpressions - ] -}; -``` - -#### `Or` expressions - -Asserts that at least one of the subexpressions' conditions are satisfied. - -```typescript -import {ConditionExpression} from '@aws/dynamodb-expressions'; - -const orExpression: ConditionExpression = { - type: 'Or', - conditions: [ - // one or more subexpressions - ] -}; -``` - -#### `Not` expressions - -Asserts that the subexpression's condition is not satisfied. 
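Compound expressions may themselves contain other compound expressions, so condition trees of arbitrary depth can be assembled from plain objects. The sketch below is an illustrative combination (not one of the package's own examples) that nests an `Or` and a negated predicate inside an `And`; the basic shape of a standalone `Not` expression follows immediately after it.

```typescript
import {ConditionExpression} from '@aws/dynamodb-expressions';

// true when (foo BETWEEN 0 AND 9, inclusive, OR foo = 42) AND bar is not 'disabled'
const nestedExpression: ConditionExpression = {
    type: 'And',
    conditions: [
        {
            type: 'Or',
            conditions: [
                {type: 'Between', subject: 'foo', lowerBound: 0, upperBound: 9},
                {type: 'Equals', subject: 'foo', object: 42},
            ]
        },
        {
            type: 'Not',
            condition: {type: 'Equals', subject: 'bar', object: 'disabled'},
        },
    ]
};
```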
- -```typescript -import {ConditionExpression} from '@aws/dynamodb-expressions'; - -const notExpression: ConditionExpression = { - type: 'Not', - condition: { - type: 'LessThan', - subject: 'foo', - object: 100 - } -}; -``` - -### Simple expressions - -These expressions make an assertion about a property in a DynamoDB object known -as the expression's `subject`. The `subject` must be a string or an [attribute -path](#attribute-paths). - -The particular assertion used is referred to in this library as a -`ConditionExpressionPredicate`. A predicate may be declared separately from its -`subject` but only becomes a valid expression when paired with a `subject`. The -supported condition expression predicates are: - -#### `Equals` expression predicate - -Creates a condition which is true if the defined `subject` is equal to the -defined `object`. For example, the following predicate object asserts that the -subject has a value of `'bar'`: - -```typescript -import { - ConditionExpression, - ConditionExpressionPredicate, -} from '@aws/dynamodb-expressions'; - -let equalsExpressionPredicate: ConditionExpressionPredicate = { - type: 'Equals', - object: 'bar' -}; - -// you can also define an equality predicate with the `equals` helper method -import {equals} from '@aws/dynamodb-expressions'; - -equalsExpressionPredicate = equals('bar'); - -// combine with a subject to create a valid condition expression -const equalsExpression: ConditionExpression = { - ...equalsExpressionPredicate, - subject: 'foo' -}; -``` - -`object` may be an [attribute path](#attribute-paths), an [attribute -value](#attribute-values), or another type. If the lattermost type is received, -it will be serialized using the `@aws/dynamodb-auto-marshaller` package. - -#### `NotEquals` expression predicate - -Creates a condition which is true if the defined `subject` is NOT equal to the -defined `object`. For example, the following predicate object asserts that the -subject does not have a value of `'bar'`: - -```typescript -import { - ConditionExpression, - ConditionExpressionPredicate, -} from '@aws/dynamodb-expressions'; - -let equalsExpressionPredicate: ConditionExpressionPredicate = { - type: 'NotEquals', - object: 'bar' -}; - -// you can also define an equality predicate with the `equals` helper method -import {notEquals} from '@aws/dynamodb-expressions'; - -equalsExpressionPredicate = notEquals('bar'); - -// combine with a subject to create a valid condition expression -const equalsExpression: ConditionExpression = { - ...equalsExpressionPredicate, - subject: 'foo' -}; -``` - -`object` may be an [attribute path](#attribute-paths), an [attribute -value](#attribute-values), or another type. If the lattermost type is received, -it will be serialized using the `@aws/dynamodb-auto-marshaller` package. - -#### `LessThan` expression predicate - -Creates a condition which is true if the defined `subject` is less than the -defined `object`. 
For example, the following predicate object asserts that the -subject is less than 10: - -```typescript -import { - ConditionExpression, - ConditionExpressionPredicate, -} from '@aws/dynamodb-expressions'; - -let equalsExpressionPredicate: ConditionExpressionPredicate = { - type: 'LessThan', - object: 10 -}; - -// you can also define an equality predicate with the `equals` helper method -import {lessThan} from '@aws/dynamodb-expressions'; - -equalsExpressionPredicate = lessThan(10); - -// combine with a subject to create a valid condition expression -const equalsExpression: ConditionExpression = { - ...equalsExpressionPredicate, - subject: 'foo' -}; -``` - -`object` may be an [attribute path](#attribute-paths), an [attribute -value](#attribute-values), or another type. If the lattermost type is received, -it will be serialized using the `@aws/dynamodb-auto-marshaller` package. - -#### `LessThanOrEqualTo` expression predicate - -Creates a condition which is true if the defined `subject` is less than or equal -to the defined `object`. For example, the following predicate object asserts -that the subject is less than or equal to 10: - -```typescript -import { - ConditionExpression, - ConditionExpressionPredicate, -} from '@aws/dynamodb-expressions'; - -let equalsExpressionPredicate: ConditionExpressionPredicate = { - type: 'LessThanOrEqualTo', - object: 10 -}; - -// you can also define an equality predicate with the `equals` helper method -import {lessThanOrEqualTo} from '@aws/dynamodb-expressions'; - -equalsExpressionPredicate = lessThanOrEqualTo(10); - -// combine with a subject to create a valid condition expression -const equalsExpression: ConditionExpression = { - ...equalsExpressionPredicate, - subject: 'foo' -}; -``` - -`object` may be an [attribute path](#attribute-paths), an [attribute -value](#attribute-values), or another type. If the lattermost type is received, -it will be serialized using the `@aws/dynamodb-auto-marshaller` package. - -#### `GreaterThan` expression predicate - -Creates a condition which is true if the defined `subject` is greater than the -defined `object`. For example, the following predicate object asserts that the -subject is greater than 10: - -```typescript -import { - ConditionExpression, - ConditionExpressionPredicate, -} from '@aws/dynamodb-expressions'; - -let equalsExpressionPredicate: ConditionExpressionPredicate = { - type: 'GreaterThan', - object: 10 -}; - -// you can also define an equality predicate with the `equals` helper method -import {greaterThan} from '@aws/dynamodb-expressions'; - -equalsExpressionPredicate = greaterThan(10); - -// combine with a subject to create a valid condition expression -const equalsExpression: ConditionExpression = { - ...equalsExpressionPredicate, - subject: 'foo' -}; -``` - -`object` may be an [attribute path](#attribute-paths), an [attribute -value](#attribute-values), or another type. If the lattermost type is received, -it will be serialized using the `@aws/dynamodb-auto-marshaller` package. - -#### `GreaterThanOrEqualTo` expression predicate - -Creates a condition which is true if the defined `subject` is greater than or -equal to the defined `object`. 
For example, the following predicate object -asserts that the subject is greater than or equal to 10: - -```typescript -import { - ConditionExpression, - ConditionExpressionPredicate, -} from '@aws/dynamodb-expressions'; - -let equalsExpressionPredicate: ConditionExpressionPredicate = { - type: 'GreaterThanOrEqualTo', - object: 10 -}; - -// you can also define an equality predicate with the `equals` helper method -import {greaterThanOrEqualTo} from '@aws/dynamodb-expressions'; - -equalsExpressionPredicate = greaterThanOrEqualTo(10); - -// combine with a subject to create a valid condition expression -const equalsExpression: ConditionExpression = { - ...equalsExpressionPredicate, - subject: 'foo' -}; -``` - -`object` may be an [attribute path](#attribute-paths), an [attribute -value](#attribute-values), or another type. If the lattermost type is received, -it will be serialized using the `@aws/dynamodb-auto-marshaller` package. - -#### `Between` expression predicate - -Creates a condition which is true if the defined `subject` is between a defined -`lowerBound` and `upperBound`. For example, the following predicate object -asserts that the subject is greater than or equal to 10 and less than or equal -to 99: - -```typescript -import { - ConditionExpression, - ConditionExpressionPredicate, -} from '@aws/dynamodb-expressions'; - -let equalsExpressionPredicate: ConditionExpressionPredicate = { - type: 'Between', - lowerBound: 10, - upperBound: 99 -}; - -// you can also define an equality predicate with the `equals` helper method -import {between} from '@aws/dynamodb-expressions'; - -equalsExpressionPredicate = between(10, 99); - -// combine with a subject to create a valid condition expression -const equalsExpression: ConditionExpression = { - ...equalsExpressionPredicate, - subject: 'foo' -}; -``` - -`lowerBound` and `upperBound` may both be an [attribute path](#attribute-paths), -an [attribute value](#attribute-values), or another type. If the lattermost type -is received, it will be serialized using the `@aws/dynamodb-auto-marshaller` -package. - -#### `Membership` expression predicate - -Creates a condition which is true if the defined `subject` is equal to a member -of a list of defined values. For example, the following predicate object asserts -that the subject is one of `'fizz'`, `'buzz'`, or `'fizzbuzz'`: - -```typescript -import { - ConditionExpression, - ConditionExpressionPredicate, -} from '@aws/dynamodb-expressions'; - -let equalsExpressionPredicate: ConditionExpressionPredicate = { - type: 'Membership', - values: ['fizz', 'buzz', 'fizzbuzz'] -}; - -// you can also define an equality predicate with the `equals` helper method -import {inList} from '@aws/dynamodb-expressions'; - -equalsExpressionPredicate = inList('fizz', 'buzz', 'fizzbuzz'); - -// combine with a subject to create a valid condition expression -const equalsExpression: ConditionExpression = { - ...equalsExpressionPredicate, - subject: 'foo' -}; -``` - -Each value in the `values` array may be an [attribute path](#attribute-paths), -an [attribute value](#attribute-values), or another type. If the lattermost type -is received, it will be serialized using the `@aws/dynamodb-auto-marshaller` -package. - -### Serializing condition expressions - -To serialize a condition expression, pass a `ConditionExpression` object and an -instance of `ExpressionAttributes`. - -## Expression attributes - -Amazon DynamoDB expressions are serialized as strings with semantically -important control characters and reserved words. 
The `ExpressionAttributes` -object will escape both attribute names and attribute values for safe use in -any expression. When a full DynamoDB request input is ready to be sent, you can -retrieve a the `ExpressionAttributeNames` and `ExpressionAttributeValues` shapes -to send alongside the input: - -```typescript -import { - AttributePath, - AttributeValue, - ExpressionAttributes, -} from '@aws/dynamodb-expressions'; -const DynamoDb = require('aws-sdk/clients/dynamodb'); - -const attributes = new ExpressionAttributes(); - -// you can add a string attribute name -const escapedFoo = attributes.addName('foo'); -// or a complex path -const escapedPath = attributes.addName('bar.baz[3].snap.crackle.pop'); -// or an already parsed attribute path -attributes.addName(new AttributePath('path.to.nested.field')); - -// raw JavaScript values added will be converted to AttributeValue objects -const escapedRaw = attributes.addValue(42); -// already marshalled values must be wrapped in an AttributeValue object -const escapedMarshalled = attributes.addValue(new AttributeValue({N: "42"})); - -const client = new DynamoDb(); -client.query({ - TableName: 'my_table', - KeyConditionExpression: `${escapedFoo} = ${escapedRaw} AND ${escapedPath} = ${escapedMarshalled}`, - ExpressionAttributeNames: attributes.names, - ExpressionAttributeValues: attributes.values, -}) -``` - -## Function expressions - -Function expressions represent named functions that DynamoDB will execute on -your behalf. The first parameter passed to the `FunctionExpression` represents -the function name and must be a string; all subsequent parameters represent -arguments to pass to the function. These parameters may be instances of -`AttributePath` (to have the function evaluate part of the DynamoDB document to -which the function applies), `AttributeValue` (for already-marshalled -AttributeValue objects), or arbitrary JavaScript values (these will be converted -by the `@aws/dynamodb-auto-marshaller` package's `Marshaller`): - -```typescript -import { - AttributePath, - ExpressionAttributes, - FunctionExpression, -} from '@aws/dynamodb-expressions'; - -const expr = new FunctionExpression( - 'list_append', - new AttributePath('path.to.list'), - 'foo' -); -const attributes = new ExpressionAttributes(); -// serializes as 'list_append(#attr0.#attr1.#attr2, :val3)' -const serialized = expr.serialize(attributes); -console.log(attributes.names); // {'#attr0': 'path', '#attr1': 'to', '#attr2': 'list'} -console.log(attributes.values); // {':val3': {S: 'foo'}} -``` - -## Mathematical expressions - -Mathematical expressions are used in the `SET` clause of update expressions to -add or subtract numbers from attribute properties containing number values: - -```typescript -import {MathematicalExpression} from '@aws/dynamodb-expressions'; - -const expr = new MathematicalExpression('version', '+', 1); -``` - -## Projection Expressions - -Projection expressions tell DynamoDB which attributes to include in fetched -records returned by `GetItem`, `Query`, or `Scan` operations. This library uses -`ProjectionExpression` as a type alias for an array of strings and -`AttributePath` objects. - -## Update Expressions - -Update expressions allow the partial, in place update of a record in DynamoDB. 
-The expression may have up to four clauses, one containing directives to set -values in the record, one containing directives to remove attributes from the -record, one containing directives to add values to a set, and the last -containing directives to delete values from a set. - -```typescript -import { - AttributePath, - FunctionExpression, - UpdateExpression, -} from '@aws/dynamodb-expressions'; - -const expr = new UpdateExpression(); - -// set a value by providing its key and the desired value -expr.set('foo', 'bar'); -// you may also set properties in nested maps and lists -expr.set( - 'path.to.my.desired[2].property', - new FunctionExpression( - 'list_append', - new AttributePath('path.to.my.desired[2].property'), - 'baz' - ) -); - -// remove a value by providing its key or path -expr.remove('fizz.buzz.pop[0]'); - -// add a value to a set -expr.add('string_set', 'foo'); - -// delete a value from the same set -expr.delete('string_set', 'bar'); -``` \ No newline at end of file diff --git a/packages/dynamodb-expressions/package.json b/packages/dynamodb-expressions/package.json deleted file mode 100644 index 35cf1df4..00000000 --- a/packages/dynamodb-expressions/package.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "name": "@aws/dynamodb-expressions", - "version": "0.7.3", - "description": "Composable expression objects for Amazon DynamoDB", - "keywords": [ - "aws", - "dynamodb" - ], - "repository": { - "type": "git", - "url": "git+https://github.com/awslabs/dynamodb-data-mapper-js.git" - }, - "bugs": { - "url": "https://github.com/awslabs/dynamodb-data-mapper-js/issues" - }, - "homepage": "https://awslabs.github.io/dynamodb-data-mapper-js/packages/dynamodb-expressions/", - "main": "./build/index.js", - "types": "./build/index.d.ts", - "scripts": { - "docs": "typedoc src", - "prepublishOnly": "tsc", - "pretest": "tsc -p tsconfig.test.json", - "test": "jest \"build/(.+).spec.js\"" - }, - "author": { - "name": "AWS SDK for JavaScript Team", - "email": "aws-sdk-js@amazon.com" - }, - "license": "Apache-2.0", - "devDependencies": { - "@types/jest": "^24", - "@types/node": "^8.0.4", - "aws-sdk": "^2.7.0", - "jest": "^24", - "typedoc": "^0.14.0", - "typescript": "^3.4" - }, - "peerDependencies": { - "aws-sdk": "^2.7.0" - }, - "dependencies": { - "@aws/dynamodb-auto-marshaller": "^0.7.1", - "tslib": "^1.9" - } -} diff --git a/packages/dynamodb-expressions/src/AttributeBearingExpression.ts b/packages/dynamodb-expressions/src/AttributeBearingExpression.ts deleted file mode 100644 index f9a91334..00000000 --- a/packages/dynamodb-expressions/src/AttributeBearingExpression.ts +++ /dev/null @@ -1,11 +0,0 @@ -import {ExpressionAttributes} from "./ExpressionAttributes"; - -export interface AttributeBearingExpression { - /** - * Convert the function expression represented by this object into the - * string format expected by DynamoDB. Any attribute names and values - * will be replaced with substitutions supplied by the provided - * ExpressionAttributes object. 
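     *
     * As an illustration (mirroring the FunctionExpression example in this
     * package's README), serializing list_append(path.to.list, 'foo') produces
     * a string such as 'list_append(#attr0.#attr1.#attr2, :val3)', with the
     * name and value substitutions recorded on the provided
     * ExpressionAttributes instance.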
- */ - serialize(attributes: ExpressionAttributes): string; -} diff --git a/packages/dynamodb-expressions/src/AttributePath.spec.ts b/packages/dynamodb-expressions/src/AttributePath.spec.ts deleted file mode 100644 index 14902e02..00000000 --- a/packages/dynamodb-expressions/src/AttributePath.spec.ts +++ /dev/null @@ -1,116 +0,0 @@ -import {AttributePath, PathElement} from "./AttributePath"; - -describe('AttributePath', () => { - it('should convert a string path to a list of elements', () => { - expect( - new AttributePath('foo.bar.baz[3][4][2].fizz[0].buzz[1]').elements - ).toEqual([ - {type: 'AttributeName', name: 'foo'}, - {type: 'AttributeName', name: 'bar'}, - {type: 'AttributeName', name: 'baz'}, - {type: 'ListIndex', index: 3}, - {type: 'ListIndex', index: 4}, - {type: 'ListIndex', index: 2}, - {type: 'AttributeName', name: 'fizz'}, - {type: 'ListIndex', index: 0}, - {type: 'AttributeName', name: 'buzz'}, - {type: 'ListIndex', index: 1}, - ]); - }); - - it('should clone an iterable of elements passed to the constructor', () => { - const elements: Array = [ - {type: 'AttributeName', name: 'foo'}, - {type: 'AttributeName', name: 'bar'}, - {type: 'AttributeName', name: 'baz'}, - {type: 'ListIndex', index: 3}, - {type: 'ListIndex', index: 4}, - {type: 'ListIndex', index: 2}, - {type: 'AttributeName', name: 'fizz'}, - {type: 'ListIndex', index: 0}, - {type: 'AttributeName', name: 'buzz'}, - {type: 'ListIndex', index: 1}, - ]; - const path = new AttributePath(elements); - - expect(path.elements).toEqual(elements); - expect(path.elements).not.toBe(elements); - - elements.shift(); - expect(path.elements).not.toEqual(elements); - expect(path.elements.slice(1)).toEqual(elements); - }); - - it('should allow attribute names with embedded control characters', () => { - expect(new AttributePath('_bracket_\\[_period_\\._backslash_\\\\_unescaped_backslash_\\_.foo').elements).toEqual([ - {type: 'AttributeName', name: '_bracket_[_period_._backslash_\\_unescaped_backslash_\\_'}, - {type: 'AttributeName', name: 'foo'}, - ]); - }); - - describe('path correctness checking', () => { - it( - 'should throw an error when a path begins with a control character', - () => { - expect(() => new AttributePath('[1]')) - .toThrowError(/Invalid control character/); - } - ); - - it( - 'should throw an error when a list index access contains invalid characters', - () => { - expect(() => new AttributePath('foo[a]')) - .toThrowError(/Invalid array index character/); - } - ); - - it( - 'should throw an error when a list index access contains no characters', - () => { - expect(() => new AttributePath('foo[]')) - .toThrowError(/Invalid array index/); - } - ); - - it( - 'should throw an error when an identifier immediately follows a list index access', - () => { - expect(() => new AttributePath('foo[1]a')) - .toThrowError(/Bare identifier encountered/); - } - ); - }); - - describe('::isAttributePath', () => { - const ctor = AttributePath; - - afterEach(() => { - (AttributePath as any) = ctor; - }); - - it('should return true for AttributePath objects', () => { - expect( - AttributePath.isAttributePath(new AttributePath('foo')) - ).toBe(true); - }); - - it('should return false for scalar values', () => { - for (let scalar of ['string', 123.234, true, null, void 0]) { - expect(AttributePath.isAttributePath(scalar)).toBe(false); - } - }); - - it( - 'should return true for AttributePaths created with a different instance of the AttributePath constructor', - () => { - const {isAttributePath} = AttributePath; - const path = new 
AttributePath('foo.bar'); - (AttributePath as any) = () => path; - - expect(path).not.toBeInstanceOf(AttributePath); - expect(isAttributePath(path)).toBe(true); - } - ); - }); -}); diff --git a/packages/dynamodb-expressions/src/AttributePath.ts b/packages/dynamodb-expressions/src/AttributePath.ts deleted file mode 100644 index 3d6e4415..00000000 --- a/packages/dynamodb-expressions/src/AttributePath.ts +++ /dev/null @@ -1,154 +0,0 @@ -const ATTRIBUTE_PATH_TAG = 'AmazonDynamoDbAttributePath'; -const EXPECTED_TAG = `[object ${ATTRIBUTE_PATH_TAG}]`; - -/** - * The path to an attribute of a DynamoDB item or to a property - * or member thereof. Supports map property access (`map.property`) - * and list member access (`list[1]`). - * - * Control characters that are part of the property identifier may be - * used when escaped with a backslash (`\`) character. - */ -export class AttributePath { - readonly elements: Array; - readonly [Symbol.toStringTag] = ATTRIBUTE_PATH_TAG; - - constructor(path: string|Iterable) { - if (typeof path === 'string') { - this.elements = parsePath(path); - } else { - this.elements = [...path]; - } - } - - /** - * Determine if the provided value is an AttributePath object. - * Compatible with AttributePath objects generated in other iframes - * or Node VMs. - */ - static isAttributePath(arg: any): arg is AttributePath { - return arg instanceof AttributePath - || Object.prototype.toString.call(arg) === EXPECTED_TAG; - } -} - -/** - * A string identifying a top-level property of a DynamoDB item or - * of a MapAttributeValue. - */ -export interface AttributeName { - type: 'AttributeName'; - name: string; -} - -/** - * The index of a particular member of a ListAttributeValue. - */ -export interface ListIndex { - type: 'ListIndex'; - index: number; -} - -export type PathElement = AttributeName|ListIndex; - -const enum ParseState { - controlCharacter = 1000, - identifier, - listIndex, -} -const LEFT_BRACKET = '['; -const RIGHT_BRACKET = ']'; -const PATH_DELIMITER = '.'; -const ESCAPE_CHARACTER = '\\'; - -function parsePath(path: string): Array { - const elements: Array = []; - let state: ParseState = ParseState.identifier; - let collected = ''; - - for ( - let iter = path[Symbol.iterator](), - curr = iter.next(), - peek = iter.next(); - curr.done === false; - curr = peek, peek = iter.next() - ) { - if (state === ParseState.identifier) { - switch (curr.value) { - case LEFT_BRACKET: - state = ParseState.listIndex; - // fallthrough - case PATH_DELIMITER: - if (collected === '') { - throw new Error( - `Invalid control character encountered in path: ${path}` - ); - } - elements.push({type: 'AttributeName', name: collected}); - collected = ''; - break; - case ESCAPE_CHARACTER: - if ( - peek.value === PATH_DELIMITER || - peek.value === LEFT_BRACKET || - peek.value === ESCAPE_CHARACTER - ) { - curr = peek; - peek = iter.next(); - } - // fallthrough - default: - collected += curr.value; - } - } else if (state === ParseState.listIndex) { - switch (curr.value) { - case RIGHT_BRACKET: - const intVal = parseInt(collected); - if (!isFinite(intVal)) { - throw new Error( - `Invalid array index (${collected}) encountered in path: ${path}` - ); - } - elements.push({type: 'ListIndex', index: intVal}); - collected = ''; - state = ParseState.controlCharacter; - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - collected += curr.value; - break; - default: - throw new Error( - `Invalid array index character 
(${curr.value}) encountered in path: ${path}` - ); - } - } else { - switch (curr.value) { - case LEFT_BRACKET: - state = ParseState.listIndex; - break; - case PATH_DELIMITER: - state = ParseState.identifier; - break; - default: - throw new Error( - `Bare identifier encountered between list index accesses in path: ${path}` - ); - } - } - } - - if (collected.length > 0) { - elements.push({type: 'AttributeName', name: collected}); - } - - return elements; -} diff --git a/packages/dynamodb-expressions/src/AttributeValue.spec.ts b/packages/dynamodb-expressions/src/AttributeValue.spec.ts deleted file mode 100644 index fdc2efd3..00000000 --- a/packages/dynamodb-expressions/src/AttributeValue.spec.ts +++ /dev/null @@ -1,36 +0,0 @@ -import {AttributeValue} from "./AttributeValue"; - -describe('AttributeValue', () => { - describe('::isAttributeValue', () => { - it('should accept valid attribute values', () => { - const value = new AttributeValue({ - S: 'string', - }); - - expect( - AttributeValue.isAttributeValue(value) - ).toBe(true); - }); - - it('should reject non-matching values', () => { - for (const notAttributeValue of [ - false, - true, - null, - void 0, - 'string', - 123, - [], - {}, - new Uint8Array(12), - {foo: 'bar'}, - {name: 'foo', arguments: 'bar'}, - {S: 'string'} - ]) { - expect( - AttributeValue.isAttributeValue(notAttributeValue) - ).toBe(false); - } - }); - }); -}); diff --git a/packages/dynamodb-expressions/src/AttributeValue.ts b/packages/dynamodb-expressions/src/AttributeValue.ts deleted file mode 100644 index 20ad27e2..00000000 --- a/packages/dynamodb-expressions/src/AttributeValue.ts +++ /dev/null @@ -1,22 +0,0 @@ -import {AttributeValue as BaseAttributeValue} from 'aws-sdk/clients/dynamodb'; - -const MARSHALLED_ATTRIBUTE_VALUE_TAG = 'AmazonDynamoDbAttributeValue'; -const EXPECTED_TOSTRING = `[object ${MARSHALLED_ATTRIBUTE_VALUE_TAG}]`; - -/** - * An object containing an already-marshalled DynamoDB AttributeValue. - * Intended to allow marshalled AttributeValues to be identified as such - * and distinguished from maps that resemble marshalled values. 
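 * For example, wrapping a marshalled value as new AttributeValue({N: '42'})
 * directs the expression helpers to use it verbatim, whereas the bare object
 * {N: '42'} would be treated as an ordinary map and re-marshalled.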
- */ -export class AttributeValue { - readonly [Symbol.toStringTag] = MARSHALLED_ATTRIBUTE_VALUE_TAG; - - constructor( - public readonly marshalled: BaseAttributeValue - ) {} - - static isAttributeValue(arg: any): arg is AttributeValue { - return arg instanceof AttributeValue - || Object.prototype.toString.call(arg) === EXPECTED_TOSTRING; - } -} diff --git a/packages/dynamodb-expressions/src/ConditionExpression.spec.ts b/packages/dynamodb-expressions/src/ConditionExpression.spec.ts deleted file mode 100644 index 2067e04b..00000000 --- a/packages/dynamodb-expressions/src/ConditionExpression.spec.ts +++ /dev/null @@ -1,648 +0,0 @@ -import { - equals, - notEquals, - lessThan, - lessThanOrEqualTo, - greaterThan, - greaterThanOrEqualTo, - between, - inList, - isConditionExpression, - isConditionExpressionPredicate, - isConditionExpressionSubject, - serializeConditionExpression, - attributeExists, - attributeNotExists, - attributeType, - beginsWith, - contains, -} from "./ConditionExpression"; -import {ExpressionAttributes} from "./ExpressionAttributes"; -import {AttributePath} from "./AttributePath"; -import {FunctionExpression} from "./FunctionExpression"; - -describe('equals', () => { - it('should return an equality condition predicate', () => { - const pred = equals(new AttributePath('foo')); - expect(isConditionExpressionPredicate(pred)).toBe(true); - expect(pred.type).toBe('Equals'); - }); -}); - -describe('notEquals', () => { - it('should return an inequality condition predicate', () => { - const pred = notEquals(new AttributePath('foo')); - expect(isConditionExpressionPredicate(pred)).toBe(true); - expect(pred.type).toBe('NotEquals'); - }); -}); - -describe('lessThan', () => { - it('should return an < condition predicate', () => { - const pred = lessThan(new AttributePath('foo')); - expect(isConditionExpressionPredicate(pred)).toBe(true); - expect(pred.type).toBe('LessThan'); - }); -}); - -describe('lessThanOrEqualTo', () => { - it('should return an <= condition predicate', () => { - const pred = lessThanOrEqualTo(new AttributePath('foo')); - expect(isConditionExpressionPredicate(pred)).toBe(true); - expect(pred.type).toBe('LessThanOrEqualTo'); - }); -}); - -describe('greaterThan', () => { - it('should return an > condition predicate', () => { - const pred = greaterThan(new AttributePath('foo')); - expect(isConditionExpressionPredicate(pred)).toBe(true); - expect(pred.type).toBe('GreaterThan'); - }); -}); - -describe('greaterThanOrEqualTo', () => { - it('should return an >= condition predicate', () => { - const pred = greaterThanOrEqualTo(new AttributePath('foo')); - expect(isConditionExpressionPredicate(pred)).toBe(true); - expect(pred.type).toBe('GreaterThanOrEqualTo'); - }); -}); - -describe('between', () => { - it('should return a bounded condition predicate', () => { - const pred = between(1, 10); - expect(isConditionExpressionPredicate(pred)).toBe(true); - expect(pred).toEqual({ - type: 'Between', - lowerBound: 1, - upperBound: 10, - }); - }); -}); - -describe('inList', () => { - it('should return a membership condition predicate', () => { - const pred = inList('foo', 'bar', 'baz', 'quux'); - expect(isConditionExpressionPredicate(pred)).toBe(true); - expect(pred).toEqual({ - type: 'Membership', - values: [ - 'foo', - 'bar', - 'baz', - 'quux', - ] - }); - }); -}); - -describe('attributeExists', () => { - it('should return an attribute_exists function predicate', () => { - const pred = attributeExists(); - expect(isConditionExpressionPredicate(pred)).toBe(true); - 
expect(pred.type).toBe('Function'); - expect(pred.name).toBe('attribute_exists'); - }); -}); - -describe('attributeNotExists', () => { - it('should return an attribute_not_exists function predicate', () => { - const pred = attributeNotExists(); - expect(isConditionExpressionPredicate(pred)).toBe(true); - expect(pred.type).toBe('Function'); - expect(pred.name).toBe('attribute_not_exists'); - }); -}); - -describe('attributeType', () => { - it('should return an attribute_type function predicate', () => { - const pred = attributeType('BOOL'); - expect(isConditionExpressionPredicate(pred)).toBe(true); - expect(pred.type).toBe('Function'); - expect(pred.name).toBe('attribute_type'); - expect(pred.expected).toBe('BOOL'); - }); -}); - -describe('beginsWith', () => { - it('should return an begins_with function predicate', () => { - const pred = beginsWith('prefix'); - expect(isConditionExpressionPredicate(pred)).toBe(true); - expect(pred.type).toBe('Function'); - expect(pred.name).toBe('begins_with'); - expect(pred.expected).toBe('prefix'); - }); -}); - -describe('contains', () => { - it('should return an contains function predicate', () => { - const pred = contains('substr'); - expect(isConditionExpressionPredicate(pred)).toBe(true); - expect(pred.type).toBe('Function'); - expect(pred.name).toBe('contains'); - expect(pred.expected).toBe('substr'); - }); -}); - -describe('isConditionExpressionPredicate', () => { - it('should return true for a valid predicate', () => { - expect(isConditionExpressionPredicate({type: 'Equals', object: 0})) - .toBe(true); - }); - - it('should reject non-matching values', () => { - for (const notPredicate of [ - false, - true, - null, - void 0, - 'string', - 123, - [], - {}, - new Uint8Array(12), - {foo: 'bar'}, - {name: 'foo', arguments: 'bar'}, - {S: 'string'} - ]) { - expect(isConditionExpressionPredicate(notPredicate)).toBe(false); - } - }); -}); - -describe('isConditionExpressionSubject', () => { - it('should return true for a string subject', () => { - expect(isConditionExpressionSubject({subject: 'foo'})).toBe(true); - }); - - it('should return true for an AttributePath subject', () => { - expect(isConditionExpressionSubject({ - subject: new AttributePath('foo.bar[3]'), - })).toBe(true); - }); - - it('should reject non-matching values', () => { - for (const notSubject of [ - false, - true, - null, - void 0, - 'string', - 123, - [], - {}, - new Uint8Array(12), - {foo: 'bar'}, - {name: 'foo', arguments: 'bar'}, - {S: 'string'}, - {subject: 123}, - ]) { - expect(isConditionExpressionSubject(notSubject)).toBe(false); - } - }); -}); - -describe('isConditionExpression', () => { - it('should return true for valid expressions', () => { - expect(isConditionExpression({ - type: 'Equals', - subject: 'foo', - object: 'bar', - })).toBe(true); - }); - - it('should return true for function expressions', () => { - expect(isConditionExpression( - new FunctionExpression('attribute_not_exists', 'foo') - )).toBe(true); - }); - - it('should return true for negation expressions', () => { - expect(isConditionExpression({ - type: 'Not', - condition: { - type: 'Between', - subject: 'foo', - lowerBound: 100, - upperBound: 200, - } - })).toBe(true); - }); - - it('should return true for compound expressions', () => { - for (const type of ['And', 'Or']) { - expect(isConditionExpression({ - type, - conditions: [ - { - type: 'Between', - subject: 'foo', - lowerBound: 100, - upperBound: 200, - }, - { - type: 'Between', - subject: 'foo', - lowerBound: 400, - upperBound: 600, - }, - ] - 
})).toBe(true); - } - }); - - it('should reject compound expressions without a conditions list', () => { - for (const type of ['And', 'Or']) { - expect(isConditionExpression({type})).toBe(false); - } - }); - - it( - 'should reject compound expressions whose list contains invalid members', - () => { - - for (const type of ['And', 'Or']) { - expect(isConditionExpression({ - type, - conditions: ['foo', 123], - })).toBe(false); - } - } - ); - - it('should reject non-matching values', () => { - for (const notExpression of [ - false, - true, - null, - void 0, - 'string', - 123, - [], - {}, - new Uint8Array(12), - {foo: 'bar'}, - {name: 'foo', arguments: 'bar'}, - {S: 'string'}, - {subject: 'foo', object: 'bar'}, - {type: 'UnknownType', subject: 'foo', object: 'bar'}, - ]) { - expect(isConditionExpression(notExpression)).toBe(false); - } - }); -}); - -describe('serializeConditionExpression', () => { - it('should serialize equality expressions', () => { - const attributes = new ExpressionAttributes(); - const serialized = serializeConditionExpression( - { - type: 'Equals', - subject: 'foo', - object: 'bar', - }, - attributes - ); - - expect(serialized).toBe('#attr0 = :val1'); - expect(attributes.names).toEqual({'#attr0': 'foo'}); - expect(attributes.values).toEqual({':val1': {S: 'bar'}}); - }); - - it('should serialize inequality expressions', () => { - const attributes = new ExpressionAttributes(); - const serialized = serializeConditionExpression( - {type: 'NotEquals', subject: 'foo', object: 'bar'}, - attributes - ); - - expect(serialized).toBe('#attr0 <> :val1'); - expect(attributes.names).toEqual({'#attr0': 'foo'}); - expect(attributes.values).toEqual({':val1': {S: 'bar'}}); - }); - - it('should serialize less than expressions', () => { - const attributes = new ExpressionAttributes(); - const serialized = serializeConditionExpression( - {type: 'LessThan', subject: 'foo', object: 'bar'}, - attributes - ); - - expect(serialized).toBe('#attr0 < :val1'); - expect(attributes.names).toEqual({'#attr0': 'foo'}); - expect(attributes.values).toEqual({':val1': {S: 'bar'}}); - }); - - it('should serialize greater than expressions', () => { - const attributes = new ExpressionAttributes(); - const serialized = serializeConditionExpression( - { - type: 'GreaterThan', - subject: 'foo', - object: new FunctionExpression('size', new AttributePath('bar')), - }, - attributes - ); - - expect(serialized).toBe('#attr0 > size(#attr1)'); - expect(attributes.names).toEqual({ - '#attr0': 'foo', - '#attr1': 'bar' - }); - expect(attributes.values).toEqual({}); - }); - - it('should serialize less than or equal to expressions', () => { - const attributes = new ExpressionAttributes(); - const serialized = serializeConditionExpression( - { - type: 'LessThanOrEqualTo', - subject: 'foo', - object: 'bar', - }, - attributes - ); - - expect(serialized).toBe('#attr0 <= :val1'); - expect(attributes.names).toEqual({'#attr0': 'foo'}); - expect(attributes.values).toEqual({':val1': {S: 'bar'}}); - }); - - it('should serialize greater than or equal to expressions', () => { - const attributes = new ExpressionAttributes(); - const serialized = serializeConditionExpression( - { - type: 'GreaterThanOrEqualTo', - subject: 'foo', - object: new AttributePath('bar'), - }, - attributes - ); - - expect(serialized).toBe('#attr0 >= #attr1'); - expect(attributes.names).toEqual({ - '#attr0': 'foo', - '#attr1': 'bar', - }); - expect(attributes.values).toEqual({}); - }); - - it('should serialize bounding expressions', () => { - const attributes = new 
ExpressionAttributes(); - const serialized = serializeConditionExpression( - { - type: 'Between', - subject: 'foo', - lowerBound: 1, - upperBound: 10, - }, - attributes - ); - - expect(serialized).toBe('#attr0 BETWEEN :val1 AND :val2'); - expect(attributes.names).toEqual({'#attr0': 'foo'}); - expect(attributes.values).toEqual({ - ':val1': {N: '1'}, - ':val2': {N: '10'}, - }); - }); - - it('should serialize membership expressions', () => { - const attributes = new ExpressionAttributes(); - const serialized = serializeConditionExpression( - { - type: 'Membership', - subject: 'foo', - values: [ - 1, - 10, - 100, - ], - }, - attributes - ); - - expect(serialized).toBe('#attr0 IN (:val1, :val2, :val3)'); - expect(attributes.names).toEqual({'#attr0': 'foo'}); - expect(attributes.values).toEqual({ - ':val1': {N: '1'}, - ':val2': {N: '10'}, - ':val3': {N: '100'}, - }); - }); - - describe('function expressions', () => { - it('should serialize attribute_exists expressions', () => { - const attributes = new ExpressionAttributes(); - const serialized = serializeConditionExpression( - {type: 'Function', subject: 'foo', name: 'attribute_exists'}, - attributes - ); - - expect(serialized).toBe('attribute_exists(#attr0)'); - expect(attributes.names).toEqual({'#attr0': 'foo'}); - expect(attributes.values).toEqual({}); - }); - - it('should serialize attribute_not_exists expressions', () => { - const attributes = new ExpressionAttributes(); - const serialized = serializeConditionExpression( - {type: 'Function', subject: 'foo', name: 'attribute_not_exists'}, - attributes - ); - - expect(serialized).toBe('attribute_not_exists(#attr0)'); - expect(attributes.names).toEqual({'#attr0': 'foo'}); - expect(attributes.values).toEqual({}); - }); - - it('should serialize attribute_type expressions', () => { - const attributes = new ExpressionAttributes(); - const serialized = serializeConditionExpression( - { - type: 'Function', - subject: 'foo', - name: 'attribute_type', - expected: 'S' - }, - attributes - ); - - expect(serialized).toBe('attribute_type(#attr0, :val1)'); - expect(attributes.names).toEqual({'#attr0': 'foo'}); - expect(attributes.values).toEqual({':val1': {S: 'S'}}); - }); - - it('should serialize begins_with expressions', () => { - const attributes = new ExpressionAttributes(); - const serialized = serializeConditionExpression( - { - type: 'Function', - subject: 'foo', - name: 'begins_with', - expected: 'prefix' - }, - attributes - ); - - expect(serialized).toBe('begins_with(#attr0, :val1)'); - expect(attributes.names).toEqual({'#attr0': 'foo'}); - expect(attributes.values).toEqual({':val1': {S: 'prefix'}}); - }); - - it('should serialize contains expressions', () => { - const attributes = new ExpressionAttributes(); - const serialized = serializeConditionExpression( - { - type: 'Function', - subject: 'foo', - name: 'contains', - expected: 'substr' - }, - attributes - ); - - expect(serialized).toBe('contains(#attr0, :val1)'); - expect(attributes.names).toEqual({'#attr0': 'foo'}); - expect(attributes.values).toEqual({':val1': {S: 'substr'}}); - }); - }); - - it('should serialize negation expressions', () => { - const attributes = new ExpressionAttributes(); - const serialized = serializeConditionExpression( - { - type: 'Not', - condition: { - type: 'Between', - subject: 'foo', - lowerBound: 1, - upperBound: 10, - } - }, - attributes - ); - - expect(serialized).toBe('NOT (#attr0 BETWEEN :val1 AND :val2)'); - expect(attributes.names).toEqual({'#attr0': 'foo'}); - expect(attributes.values).toEqual({ - 
':val1': {N: '1'}, - ':val2': {N: '10'}, - }); - }); - - it('should serialize and expressions', () => { - const attributes = new ExpressionAttributes(); - const serialized = serializeConditionExpression( - { - type: 'And', - conditions: [ - { - type: 'GreaterThanOrEqualTo', - subject: 'foo', - object: 1, - }, - { - type: 'LessThan', - subject: 'foo', - object: 10, - }, - { - type: 'Equals', - subject: 'fizz', - object: 'buzz', - } - ] - }, - attributes - ); - - expect(serialized).toBe('(#attr0 >= :val1) AND (#attr0 < :val2) AND (#attr3 = :val4)'); - expect(attributes.names).toEqual({ - '#attr0': 'foo', - '#attr3': 'fizz', - }); - expect(attributes.values).toEqual({ - ':val1': {N: '1'}, - ':val2': {N: '10'}, - ':val4': {S: 'buzz'}, - }); - }); - - it('should serialize single-clause and expressions as the underlying expression type', () => { - const attributes = new ExpressionAttributes(); - const serialized = serializeConditionExpression( - { - type: 'And', - conditions: [ - { - type: 'Membership', - subject: 'foo', - values: [ - 1, - 10, - 100, - ], - }, - ] - }, - attributes - ); - - expect(serialized).toBe('#attr0 IN (:val1, :val2, :val3)'); - expect(attributes.names).toEqual({'#attr0': 'foo'}); - expect(attributes.values).toEqual({ - ':val1': {N: '1'}, - ':val2': {N: '10'}, - ':val3': {N: '100'}, - }); - }); - - it('should serialize or expressions', () => { - const attributes = new ExpressionAttributes(); - const serialized = serializeConditionExpression( - { - type: 'Or', - conditions: [ - { - type: 'GreaterThanOrEqualTo', - subject: 'foo', - object: 10, - }, - { - type: 'LessThan', - subject: 'foo', - object: 1, - } - ] - }, - attributes - ); - - expect(serialized).toBe('(#attr0 >= :val1) OR (#attr0 < :val2)'); - expect(attributes.names).toEqual({ - '#attr0': 'foo', - }); - expect(attributes.values).toEqual({ - ':val1': {N: '10'}, - ':val2': {N: '1'}, - }); - }); - - it('should serialize function expressions', () => { - const attributes = new ExpressionAttributes(); - const serialized = serializeConditionExpression( - new FunctionExpression( - 'attribute_type', - new AttributePath('foo'), - 'S' - ), - attributes - ); - - expect(serialized).toBe('attribute_type(#attr0, :val1)'); - expect(attributes.names).toEqual({'#attr0': 'foo'}); - expect(attributes.values).toEqual({':val1': {S: 'S'}}); - }); -}); diff --git a/packages/dynamodb-expressions/src/ConditionExpression.ts b/packages/dynamodb-expressions/src/ConditionExpression.ts deleted file mode 100644 index 2ac7f353..00000000 --- a/packages/dynamodb-expressions/src/ConditionExpression.ts +++ /dev/null @@ -1,499 +0,0 @@ -import {AttributePath} from "./AttributePath"; -import {AttributeValue} from "./AttributeValue"; -import {ExpressionAttributes} from "./ExpressionAttributes"; -import {FunctionExpression} from "./FunctionExpression"; - -export type ComparisonOperand = AttributePath|AttributeValue|FunctionExpression|any; - -export interface BinaryComparisonPredicate { - /** - * The value against which the comparison subject will be compared. - */ - object: ComparisonOperand; -} - -/** - * A comparison predicate asserting that the subject and object are equal. - */ -export interface EqualityExpressionPredicate extends BinaryComparisonPredicate { - type: 'Equals'; -} - -/** - * Create an expression predicate asserting that the subject is equal to the - * predicate. 
- */ -export function equals( - operand: ComparisonOperand -): EqualityExpressionPredicate { - return { - type: 'Equals', - object: operand, - }; -} - -/** - * A comparison predicate asserting that the subject and object are not equal. - */ -export interface InequalityExpressionPredicate extends BinaryComparisonPredicate { - type: 'NotEquals'; -} - -export function notEquals( - operand: ComparisonOperand -): InequalityExpressionPredicate { - return { - type: 'NotEquals', - object: operand, - } -} - -/** - * A comparison predicate asserting that the subject is less than the object. - */ -export interface LessThanExpressionPredicate extends BinaryComparisonPredicate { - type: 'LessThan'; -} - -export function lessThan( - operand: ComparisonOperand -): LessThanExpressionPredicate { - return { - type: 'LessThan', - object: operand, - } -} - -/** - * A comparison predicate asserting that the subject is less than or equal to - * the object. - */ -export interface LessThanOrEqualToExpressionPredicate extends BinaryComparisonPredicate { - type: 'LessThanOrEqualTo'; -} - -export function lessThanOrEqualTo( - operand: ComparisonOperand -): LessThanOrEqualToExpressionPredicate { - return { - type: 'LessThanOrEqualTo', - object: operand, - } -} - -/** - * A comparison predicate asserting that the subject is greater than the object. - */ -export interface GreaterThanExpressionPredicate extends BinaryComparisonPredicate { - type: 'GreaterThan'; -} - -export function greaterThan( - operand: ComparisonOperand -): GreaterThanExpressionPredicate { - return { - type: 'GreaterThan', - object: operand, - } -} - -/** - * A comparison predicate asserting that the subject is greater than or equal - * to the object. - */ -export interface GreaterThanOrEqualToExpressionPredicate extends BinaryComparisonPredicate { - type: 'GreaterThanOrEqualTo'; -} - -export function greaterThanOrEqualTo( - operand: ComparisonOperand -): GreaterThanOrEqualToExpressionPredicate { - return { - type: 'GreaterThanOrEqualTo', - object: operand, - } -} - -/** - * A comparison predicate asserting that the subject is between two bounds. - */ -export interface BetweenExpressionPredicate { - type: 'Between'; - lowerBound: ComparisonOperand; - upperBound: ComparisonOperand; -} - -export function between( - lowerBound: ComparisonOperand, - upperBound: ComparisonOperand -): BetweenExpressionPredicate { - return { - type: 'Between', - lowerBound, - upperBound, - } -} - -/** - * A comparison predicate asserting that the subject is equal to any member of - * the provided list of values. - */ -export interface MembershipExpressionPredicate { - type: 'Membership'; - values: Array; -} - -export function inList( - ...operands: Array -): MembershipExpressionPredicate { - return { - type: 'Membership', - values: operands, - } -} - -/** - * An object structure used as the base of all function expression predicates. - */ -export interface BaseFunctionExpressionPredicate { - type: 'Function'; - name: string; -} - -/** - * A comparison predicate asserting that the subject is contained in a given - * record. - */ -export interface AttributeExistsPredicate extends - BaseFunctionExpressionPredicate -{ - name: 'attribute_exists'; -} - -export function attributeExists(): AttributeExistsPredicate { - return { - type: 'Function', - name: 'attribute_exists', - }; -} - - -/** - * A comparison predicate asserting that the subject is **not** contained in a - * given record. 
- */ -export interface AttributeNotExistsPredicate extends - BaseFunctionExpressionPredicate -{ - name: 'attribute_not_exists'; -} - -export function attributeNotExists(): AttributeNotExistsPredicate { - return { - type: 'Function', - name: 'attribute_not_exists', - }; -} - -export type AttributeType = 'S'|'SS'|'N'|'NS'|'B'|'BS'|'BOOL'|'NULL'|'L'|'M'; - -/** - * A comparison predicate asserting that the subject is of the specified type. - */ -export interface AttributeTypePredicate extends - BaseFunctionExpressionPredicate -{ - name: 'attribute_type'; - expected: AttributeType; -} - -export function attributeType(expected: AttributeType): AttributeTypePredicate { - return { - type: 'Function', - name: 'attribute_type', - expected, - }; -} - -/** - * A comparison predicate asserting that the value of the subject in a given - * record begins with the specified string. - */ -export interface BeginsWithPredicate extends - BaseFunctionExpressionPredicate -{ - name: 'begins_with'; - expected: string; -} - -export function beginsWith(expected: string): BeginsWithPredicate { - return { - type: 'Function', - name: 'begins_with', - expected, - }; -} - -/** - * A comparison predicate asserting that the value of the subject in a given - * record contains the specified string. - */ -export interface ContainsPredicate extends - BaseFunctionExpressionPredicate -{ - name: 'contains'; - expected: string; -} - -export function contains(expected: string): ContainsPredicate { - return { - type: 'Function', - name: 'contains', - expected, - }; -} - -export type FunctionExpressionPredicate = - AttributeExistsPredicate | - AttributeNotExistsPredicate | - AttributeTypePredicate | - BeginsWithPredicate | - ContainsPredicate; - -export type ConditionExpressionPredicate = - EqualityExpressionPredicate | - InequalityExpressionPredicate | - LessThanExpressionPredicate | - LessThanOrEqualToExpressionPredicate | - GreaterThanExpressionPredicate | - GreaterThanExpressionPredicate | - GreaterThanOrEqualToExpressionPredicate | - BetweenExpressionPredicate | - MembershipExpressionPredicate | - FunctionExpressionPredicate; - -/** - * Evaluate whether the provided value is a condition expression predicate. - */ -export function isConditionExpressionPredicate( - arg: any -): arg is ConditionExpressionPredicate { - if (arg && typeof arg === 'object') { - switch (arg.type) { - case 'Equals': - case 'NotEquals': - case 'LessThan': - case 'LessThanOrEqualTo': - case 'GreaterThan': - case 'GreaterThanOrEqualTo': - return arg.object !== undefined; - case 'Between': - return arg.lowerBound !== undefined - && arg.upperBound !== undefined; - case 'Membership': - return Array.isArray(arg.values); - case 'Function': - switch (arg.name) { - case 'attribute_exists': - case 'attribute_not_exists': - return true; - case 'attribute_type': - case 'begins_with': - case 'contains': - return typeof arg.expected === 'string'; - } - } - } - - return false; -} - -export interface ConditionExpressionSubject { - /** - * The path to the item attribute containing the subject of the comparison. 
- */ - subject: AttributePath|string; -} - -export function isConditionExpressionSubject( - arg: any -): arg is ConditionExpressionSubject { - return Boolean(arg) - && typeof arg === 'object' - && (typeof arg.subject === 'string' || AttributePath.isAttributePath(arg.subject)); -} - -export type SimpleConditionExpression = ConditionExpressionSubject & - ConditionExpressionPredicate; - -export type ConditionExpression = - SimpleConditionExpression | - AndExpression | - OrExpression | - NotExpression | - FunctionExpression; - -/** - * A comparison expression asserting that all conditions in the provided list - * are true. - */ -export interface AndExpression { - type: 'And'; - conditions: Array; -} - -/** - * A comparison expression asserting that one or more conditions in the provided - * list are true. - */ -export interface OrExpression { - type: 'Or'; - conditions: Array; -} - -/** - * A comparison expression asserting that the provided condition is not true. - */ -export interface NotExpression { - type: 'Not'; - condition: ConditionExpression; -} - -/** - * Evaluates whether the provided value is a condition expression. - */ -export function isConditionExpression(arg: any): arg is ConditionExpression { - if (FunctionExpression.isFunctionExpression(arg)) { - return true; - } - - if (Boolean(arg) && typeof arg === 'object') { - switch (arg.type) { - case 'Not': - return isConditionExpression(arg.condition); - case 'And': - case 'Or': - if (Array.isArray(arg.conditions)) { - for (const condition of arg.conditions) { - if (!isConditionExpression(condition)) { - return false; - } - } - - return true; - } - - return false; - default: - return isConditionExpressionSubject(arg) - && isConditionExpressionPredicate(arg); - } - } - - return false; -} - -/** - * Convert the provided condition expression object to a string, escaping any - * values and attributes to expression-safe placeholders whose expansion value - * will be managed by the provided ExpressionAttributes object. - */ -export function serializeConditionExpression( - condition: ConditionExpression, - attributes: ExpressionAttributes -): string { - if (FunctionExpression.isFunctionExpression(condition)) { - return condition.serialize(attributes); - } - - switch (condition.type) { - case 'Equals': - return serializeBinaryComparison(condition, attributes, '='); - case 'NotEquals': - return serializeBinaryComparison(condition, attributes, '<>'); - case 'LessThan': - return serializeBinaryComparison(condition, attributes, '<'); - case 'LessThanOrEqualTo': - return serializeBinaryComparison(condition, attributes, '<='); - case 'GreaterThan': - return serializeBinaryComparison(condition, attributes, '>'); - case 'GreaterThanOrEqualTo': - return serializeBinaryComparison(condition, attributes, '>='); - case 'Between': - return `${ - attributes.addName(condition.subject) - } BETWEEN ${ - serializeOperand(condition.lowerBound, attributes) - } AND ${ - serializeOperand(condition.upperBound, attributes) - }`; - case 'Membership': - return `${ - attributes.addName(condition.subject) - } IN (${ - condition.values.map(val => serializeOperand(val, attributes)) - .join(', ') - })`; - case 'Function': - const subject = AttributePath.isAttributePath(condition.subject) - ? 
condition.subject - : new AttributePath(condition.subject); - - switch (condition.name) { - case 'attribute_exists': - case 'attribute_not_exists': - return (new FunctionExpression(condition.name, subject)) - .serialize(attributes); - case 'attribute_type': - case 'begins_with': - case 'contains': - return (new FunctionExpression( - condition.name, - subject, - condition.expected - )) - .serialize(attributes); - } - case 'Not': - return `NOT (${ - serializeConditionExpression(condition.condition, attributes) - })`; - case 'And': - case 'Or': - if (condition.conditions.length === 1) { - return serializeConditionExpression( - condition.conditions[0], - attributes - ); - } - - return condition.conditions - .map(cond => `(${serializeConditionExpression(cond, attributes)})`) - .join(` ${condition.type.toUpperCase()} `); - } -} - -function serializeBinaryComparison( - cond: BinaryComparisonPredicate & ConditionExpressionSubject, - attributes: ExpressionAttributes, - comparator: string -): string { - return `${ - attributes.addName(cond.subject) - } ${comparator} ${ - serializeOperand(cond.object, attributes) - }`; -} - -function serializeOperand( - operand: ComparisonOperand, - attributes: ExpressionAttributes -): string { - if (FunctionExpression.isFunctionExpression(operand)) { - return operand.serialize(attributes); - } - - return AttributePath.isAttributePath(operand) - ? attributes.addName(operand) - : attributes.addValue(operand); -} diff --git a/packages/dynamodb-expressions/src/ExpressionAttributes.spec.ts b/packages/dynamodb-expressions/src/ExpressionAttributes.spec.ts deleted file mode 100644 index cc8023c9..00000000 --- a/packages/dynamodb-expressions/src/ExpressionAttributes.spec.ts +++ /dev/null @@ -1,686 +0,0 @@ -import {ExpressionAttributes} from "./ExpressionAttributes"; -import {ExpressionAttributeValueMap} from 'aws-sdk/clients/dynamodb'; -import {AttributePath} from "./AttributePath"; - -describe('ExpressionAttributes', () => { - describe('#addName', () => { - it('should provide expression-safe aliases for names', () => { - const ea = new ExpressionAttributes(); - for (const reservedWord of DDB_RESERVED_WORDS) { - const alias = ea.addName(reservedWord); - expect(alias).toMatch(/^#[A-Za-z0-9]+$/); - expect(alias).not.toBe(reservedWord); - expect(DDB_RESERVED_WORDS.has(alias)).toBe(false); - } - }); - - it( - 'should return the same alias for a name submitted multiple times', - () => { - const ea = new ExpressionAttributes(); - for (const reservedWord of DDB_RESERVED_WORDS) { - const alias = ea.addName(reservedWord); - for (let i = 0; i < 10; i++) { - expect(ea.addName(reservedWord)).toBe(alias); - } - } - } - ); - - it( - 'should provide an ExpressionAttributeNameMap of all aliased names', - () => { - const ea = new ExpressionAttributes(); - for (const reservedWord of DDB_RESERVED_WORDS) { - ea.addName(reservedWord); - } - - const {names} = ea; - const reservedWords = new Set(DDB_RESERVED_WORDS); - for (const expressionSafeName of Object.keys(names)) { - const original = names[expressionSafeName]; - expect(reservedWords.delete(original)).toBe(true); - } - - expect(reservedWords.size).toBe(0); - } - ); - - it('should allow the addition of list index dereferences', () => { - const attributes = new ExpressionAttributes(); - attributes.addName(new AttributePath('foo[2]')); - - expect(attributes.names).toEqual({ - '#attr0': 'foo', - }); - }); - - it('should allow the addition of nested attributes', () => { - const attributes = new ExpressionAttributes(); - attributes.addName(new 
AttributePath('foo.bar')); - - expect(attributes.names).toEqual({ - '#attr0': 'foo', - '#attr1': 'bar', - }); - }); - - it( - 'should allow the nesting of complex attributes to an arbitrary depth', - () => { - const attributes = new ExpressionAttributes(); - attributes.addName(new AttributePath('foo.bar[3].baz[4].quux.snap.crackle.pop[2][1][0]')); - - expect(attributes.names).toEqual({ - '#attr0': 'foo', - '#attr1': 'bar', - '#attr2': 'baz', - '#attr3': 'quux', - '#attr4': 'snap', - '#attr5': 'crackle', - '#attr6': 'pop', - }); - } - ); - }); - - describe('#addValue', () => { - it('should provide expression-safe aliases for values', () => { - const ea = new ExpressionAttributes(); - for (const reservedWord of DDB_RESERVED_WORDS) { - const alias = ea.addValue(reservedWord); - expect(alias).toMatch(/^:[A-Za-z0-9]+$/); - } - }); - - it( - 'should provide an ExpressionAttributeValueMap of all aliased values', - () => { - const expected: ExpressionAttributeValueMap = {}; - const ea = new ExpressionAttributes(); - for (const reservedWord of DDB_RESERVED_WORDS) { - const alias = ea.addValue(reservedWord); - expected[alias] = {S: reservedWord}; - } - - expect(ea.values).toEqual(expected); - } - ); - }); -}); - -const DDB_RESERVED_WORDS = new Set([ - 'ABORT', - 'ABSOLUTE', - 'ACTION', - 'ADD', - 'AFTER', - 'AGENT', - 'AGGREGATE', - 'ALL', - 'ALLOCATE', - 'ALTER', - 'ANALYZE', - 'AND', - 'ANY', - 'ARCHIVE', - 'ARE', - 'ARRAY', - 'AS', - 'ASC', - 'ASCII', - 'ASENSITIVE', - 'ASSERTION', - 'ASYMMETRIC', - 'AT', - 'ATOMIC', - 'ATTACH', - 'ATTRIBUTE', - 'AUTH', - 'AUTHORIZATION', - 'AUTHORIZE', - 'AUTO', - 'AVG', - 'BACK', - 'BACKUP', - 'BASE', - 'BATCH', - 'BEFORE', - 'BEGIN', - 'BETWEEN', - 'BIGINT', - 'BINARY', - 'BIT', - 'BLOB', - 'BLOCK', - 'BOOLEAN', - 'BOTH', - 'BREADTH', - 'BUCKET', - 'BULK', - 'BY', - 'BYTE', - 'CALL', - 'CALLED', - 'CALLING', - 'CAPACITY', - 'CASCADE', - 'CASCADED', - 'CASE', - 'CAST', - 'CATALOG', - 'CHAR', - 'CHARACTER', - 'CHECK', - 'CLASS', - 'CLOB', - 'CLOSE', - 'CLUSTER', - 'CLUSTERED', - 'CLUSTERING', - 'CLUSTERS', - 'COALESCE', - 'COLLATE', - 'COLLATION', - 'COLLECTION', - 'COLUMN', - 'COLUMNS', - 'COMBINE', - 'COMMENT', - 'COMMIT', - 'COMPACT', - 'COMPILE', - 'COMPRESS', - 'CONDITION', - 'CONFLICT', - 'CONNECT', - 'CONNECTION', - 'CONSISTENCY', - 'CONSISTENT', - 'CONSTRAINT', - 'CONSTRAINTS', - 'CONSTRUCTOR', - 'CONSUMED', - 'CONTINUE', - 'CONVERT', - 'COPY', - 'CORRESPONDING', - 'COUNT', - 'COUNTER', - 'CREATE', - 'CROSS', - 'CUBE', - 'CURRENT', - 'CURSOR', - 'CYCLE', - 'DATA', - 'DATABASE', - 'DATE', - 'DATETIME', - 'DAY', - 'DEALLOCATE', - 'DEC', - 'DECIMAL', - 'DECLARE', - 'DEFAULT', - 'DEFERRABLE', - 'DEFERRED', - 'DEFINE', - 'DEFINED', - 'DEFINITION', - 'DELETE', - 'DELIMITED', - 'DEPTH', - 'DEREF', - 'DESC', - 'DESCRIBE', - 'DESCRIPTOR', - 'DETACH', - 'DETERMINISTIC', - 'DIAGNOSTICS', - 'DIRECTORIES', - 'DISABLE', - 'DISCONNECT', - 'DISTINCT', - 'DISTRIBUTE', - 'DO', - 'DOMAIN', - 'DOUBLE', - 'DROP', - 'DUMP', - 'DURATION', - 'DYNAMIC', - 'EACH', - 'ELEMENT', - 'ELSE', - 'ELSEIF', - 'EMPTY', - 'ENABLE', - 'END', - 'EQUAL', - 'EQUALS', - 'ERROR', - 'ESCAPE', - 'ESCAPED', - 'EVAL', - 'EVALUATE', - 'EXCEEDED', - 'EXCEPT', - 'EXCEPTION', - 'EXCEPTIONS', - 'EXCLUSIVE', - 'EXEC', - 'EXECUTE', - 'EXISTS', - 'EXIT', - 'EXPLAIN', - 'EXPLODE', - 'EXPORT', - 'EXPRESSION', - 'EXTENDED', - 'EXTERNAL', - 'EXTRACT', - 'FAIL', - 'FALSE', - 'FAMILY', - 'FETCH', - 'FIELDS', - 'FILE', - 'FILTER', - 'FILTERING', - 'FINAL', - 'FINISH', - 'FIRST', - 'FIXED', - 'FLATTERN', - 'FLOAT', 
- 'FOR', - 'FORCE', - 'FOREIGN', - 'FORMAT', - 'FORWARD', - 'FOUND', - 'FREE', - 'FROM', - 'FULL', - 'FUNCTION', - 'FUNCTIONS', - 'GENERAL', - 'GENERATE', - 'GET', - 'GLOB', - 'GLOBAL', - 'GO', - 'GOTO', - 'GRANT', - 'GREATER', - 'GROUP', - 'GROUPING', - 'HANDLER', - 'HASH', - 'HAVE', - 'HAVING', - 'HEAP', - 'HIDDEN', - 'HOLD', - 'HOUR', - 'IDENTIFIED', - 'IDENTITY', - 'IF', - 'IGNORE', - 'IMMEDIATE', - 'IMPORT', - 'IN', - 'INCLUDING', - 'INCLUSIVE', - 'INCREMENT', - 'INCREMENTAL', - 'INDEX', - 'INDEXED', - 'INDEXES', - 'INDICATOR', - 'INFINITE', - 'INITIALLY', - 'INLINE', - 'INNER', - 'INNTER', - 'INOUT', - 'INPUT', - 'INSENSITIVE', - 'INSERT', - 'INSTEAD', - 'INT', - 'INTEGER', - 'INTERSECT', - 'INTERVAL', - 'INTO', - 'INVALIDATE', - 'IS', - 'ISOLATION', - 'ITEM', - 'ITEMS', - 'ITERATE', - 'JOIN', - 'KEY', - 'KEYS', - 'LAG', - 'LANGUAGE', - 'LARGE', - 'LAST', - 'LATERAL', - 'LEAD', - 'LEADING', - 'LEAVE', - 'LEFT', - 'LENGTH', - 'LESS', - 'LEVEL', - 'LIKE', - 'LIMIT', - 'LIMITED', - 'LINES', - 'LIST', - 'LOAD', - 'LOCAL', - 'LOCALTIME', - 'LOCALTIMESTAMP', - 'LOCATION', - 'LOCATOR', - 'LOCK', - 'LOCKS', - 'LOG', - 'LOGED', - 'LONG', - 'LOOP', - 'LOWER', - 'MAP', - 'MATCH', - 'MATERIALIZED', - 'MAX', - 'MAXLEN', - 'MEMBER', - 'MERGE', - 'METHOD', - 'METRICS', - 'MIN', - 'MINUS', - 'MINUTE', - 'MISSING', - 'MOD', - 'MODE', - 'MODIFIES', - 'MODIFY', - 'MODULE', - 'MONTH', - 'MULTI', - 'MULTISET', - 'NAME', - 'NAMES', - 'NATIONAL', - 'NATURAL', - 'NCHAR', - 'NCLOB', - 'NEW', - 'NEXT', - 'NO', - 'NONE', - 'NOT', - 'NULL', - 'NULLIF', - 'NUMBER', - 'NUMERIC', - 'OBJECT', - 'OF', - 'OFFLINE', - 'OFFSET', - 'OLD', - 'ON', - 'ONLINE', - 'ONLY', - 'OPAQUE', - 'OPEN', - 'OPERATOR', - 'OPTION', - 'OR', - 'ORDER', - 'ORDINALITY', - 'OTHER', - 'OTHERS', - 'OUT', - 'OUTER', - 'OUTPUT', - 'OVER', - 'OVERLAPS', - 'OVERRIDE', - 'OWNER', - 'PAD', - 'PARALLEL', - 'PARAMETER', - 'PARAMETERS', - 'PARTIAL', - 'PARTITION', - 'PARTITIONED', - 'PARTITIONS', - 'PATH', - 'PERCENT', - 'PERCENTILE', - 'PERMISSION', - 'PERMISSIONS', - 'PIPE', - 'PIPELINED', - 'PLAN', - 'POOL', - 'POSITION', - 'PRECISION', - 'PREPARE', - 'PRESERVE', - 'PRIMARY', - 'PRIOR', - 'PRIVATE', - 'PRIVILEGES', - 'PROCEDURE', - 'PROCESSED', - 'PROJECT', - 'PROJECTION', - 'PROPERTY', - 'PROVISIONING', - 'PUBLIC', - 'PUT', - 'QUERY', - 'QUIT', - 'QUORUM', - 'RAISE', - 'RANDOM', - 'RANGE', - 'RANK', - 'RAW', - 'READ', - 'READS', - 'REAL', - 'REBUILD', - 'RECORD', - 'RECURSIVE', - 'REDUCE', - 'REF', - 'REFERENCE', - 'REFERENCES', - 'REFERENCING', - 'REGEXP', - 'REGION', - 'REINDEX', - 'RELATIVE', - 'RELEASE', - 'REMAINDER', - 'RENAME', - 'REPEAT', - 'REPLACE', - 'REQUEST', - 'RESET', - 'RESIGNAL', - 'RESOURCE', - 'RESPONSE', - 'RESTORE', - 'RESTRICT', - 'RESULT', - 'RETURN', - 'RETURNING', - 'RETURNS', - 'REVERSE', - 'REVOKE', - 'RIGHT', - 'ROLE', - 'ROLES', - 'ROLLBACK', - 'ROLLUP', - 'ROUTINE', - 'ROW', - 'ROWS', - 'RULE', - 'RULES', - 'SAMPLE', - 'SATISFIES', - 'SAVE', - 'SAVEPOINT', - 'SCAN', - 'SCHEMA', - 'SCOPE', - 'SCROLL', - 'SEARCH', - 'SECOND', - 'SECTION', - 'SEGMENT', - 'SEGMENTS', - 'SELECT', - 'SELF', - 'SEMI', - 'SENSITIVE', - 'SEPARATE', - 'SEQUENCE', - 'SERIALIZABLE', - 'SESSION', - 'SET', - 'SETS', - 'SHARD', - 'SHARE', - 'SHARED', - 'SHORT', - 'SHOW', - 'SIGNAL', - 'SIMILAR', - 'SIZE', - 'SKEWED', - 'SMALLINT', - 'SNAPSHOT', - 'SOME', - 'SOURCE', - 'SPACE', - 'SPACES', - 'SPARSE', - 'SPECIFIC', - 'SPECIFICTYPE', - 'SPLIT', - 'SQL', - 'SQLCODE', - 'SQLERROR', - 'SQLEXCEPTION', - 'SQLSTATE', - 'SQLWARNING', - 'START', - 'STATE', - 
'STATIC', - 'STATUS', - 'STORAGE', - 'STORE', - 'STORED', - 'STREAM', - 'STRING', - 'STRUCT', - 'STYLE', - 'SUB', - 'SUBMULTISET', - 'SUBPARTITION', - 'SUBSTRING', - 'SUBTYPE', - 'SUM', - 'SUPER', - 'SYMMETRIC', - 'SYNONYM', - 'SYSTEM', - 'TABLE', - 'TABLESAMPLE', - 'TEMP', - 'TEMPORARY', - 'TERMINATED', - 'TEXT', - 'THAN', - 'THEN', - 'THROUGHPUT', - 'TIME', - 'TIMESTAMP', - 'TIMEZONE', - 'TINYINT', - 'TO', - 'TOKEN', - 'TOTAL', - 'TOUCH', - 'TRAILING', - 'TRANSACTION', - 'TRANSFORM', - 'TRANSLATE', - 'TRANSLATION', - 'TREAT', - 'TRIGGER', - 'TRIM', - 'TRUE', - 'TRUNCATE', - 'TTL', - 'TUPLE', - 'TYPE', - 'UNDER', - 'UNDO', - 'UNION', - 'UNIQUE', - 'UNIT', - 'UNKNOWN', - 'UNLOGGED', - 'UNNEST', - 'UNPROCESSED', - 'UNSIGNED', - 'UNTIL', - 'UPDATE', - 'UPPER', - 'URL', - 'USAGE', - 'USE', - 'USER', - 'USERS', - 'USING', - 'UUID', - 'VACUUM', - 'VALUE', - 'VALUED', - 'VALUES', - 'VARCHAR', - 'VARIABLE', - 'VARIANCE', - 'VARINT', - 'VARYING', - 'VIEW', - 'VIEWS', - 'VIRTUAL', - 'VOID', - 'WAIT', - 'WHEN', - 'WHENEVER', - 'WHERE', - 'WHILE', - 'WINDOW', - 'WITH', - 'WITHIN', - 'WITHOUT', - 'WORK', - 'WRAPPED', - 'WRITE', - 'YEAR', - 'ZONE', -]); diff --git a/packages/dynamodb-expressions/src/ExpressionAttributes.ts b/packages/dynamodb-expressions/src/ExpressionAttributes.ts deleted file mode 100644 index 71a44098..00000000 --- a/packages/dynamodb-expressions/src/ExpressionAttributes.ts +++ /dev/null @@ -1,69 +0,0 @@ -import {AttributePath} from "./AttributePath"; -import {AttributeValue} from './AttributeValue'; -import {Marshaller} from "@aws/dynamodb-auto-marshaller"; -import { - AttributeValue as AttributeValueModel, - ExpressionAttributeNameMap, - ExpressionAttributeValueMap, -} from 'aws-sdk/clients/dynamodb'; - -/** - * An object that manages expression attribute name and value substitution. - */ -export class ExpressionAttributes { - readonly names: ExpressionAttributeNameMap = {}; - readonly values: ExpressionAttributeValueMap = {}; - readonly marshaller = new Marshaller(); - - private readonly nameMap: {[attributeName: string]: string} = {}; - private _ctr = 0; - - /** - * Add an attribute path to this substitution context. - * - * @returns The substitution value to use in the expression. The same - * attribute name will always be converted to the same substitution value - * when supplied to the same ExpressionAttributes object multiple times. - */ - addName(path: AttributePath|string): string { - if (AttributePath.isAttributePath(path)) { - let escapedPath = ''; - for (const element of path.elements) { - if (element.type === 'AttributeName') { - escapedPath += `.${this.addAttributeName(element.name)}`; - } else { - escapedPath += `[${element.index}]`; - } - } - - return escapedPath.substring(1); - } - - return this.addName(new AttributePath(path)); - } - - /** - * Add an attribute value to this substitution context. - * - * @returns The substitution value to use in the expression. - */ - addValue(value: any): string { - const modeledAttrValue = AttributeValue.isAttributeValue(value) - ? 
value.marshalled as AttributeValueModel - : this.marshaller.marshallValue(value) as AttributeValueModel; - - const substitution = `:val${this._ctr++}`; - this.values[substitution] = modeledAttrValue; - - return substitution; - } - - private addAttributeName(attributeName: string): string { - if (!(attributeName in this.nameMap)) { - this.nameMap[attributeName] = `#attr${this._ctr++}`; - this.names[this.nameMap[attributeName]] = attributeName; - } - - return this.nameMap[attributeName]; - } -} diff --git a/packages/dynamodb-expressions/src/FunctionExpression.spec.ts b/packages/dynamodb-expressions/src/FunctionExpression.spec.ts deleted file mode 100644 index 884c98c2..00000000 --- a/packages/dynamodb-expressions/src/FunctionExpression.spec.ts +++ /dev/null @@ -1,78 +0,0 @@ -import {FunctionExpression} from "./FunctionExpression"; -import {ExpressionAttributes} from "./ExpressionAttributes"; -import {AttributePath} from "./AttributePath"; - -describe('FunctionExpression', () => { - const basicFunctionExpression = new FunctionExpression( - 'foo', - new AttributePath('bar'), - 'baz' - ); - - describe('::isFunctionExpression', () => { - it('should accept valid function expressions', () => { - expect( - FunctionExpression.isFunctionExpression(basicFunctionExpression) - ).toBe(true); - }); - - it('should reject non-matching values', () => { - for (const notFunctionExpression of [ - false, - true, - null, - void 0, - 'string', - 123, - [], - {}, - new Uint8Array(12), - {foo: 'bar'}, - {name: 'foo', arguments: 'bar'}, - ]) { - expect( - FunctionExpression.isFunctionExpression(notFunctionExpression) - ).toBe(false); - } - }); - }); - - describe('#serialize', () => { - it('should serialize basic function expressions', () => { - const attributes = new ExpressionAttributes(); - expect( - basicFunctionExpression.serialize(attributes) - ).toBe('foo(#attr0, :val1)'); - - expect(attributes.names).toEqual({ - '#attr0': 'bar', - }); - - expect(attributes.values).toEqual({ - ':val1': {S: 'baz'}, - }); - }); - - it('should support nested function expressions', () => { - const nestedFunction = new FunctionExpression( - 'foo', - new AttributePath('bar'), - 'baz', - new FunctionExpression('fizz', new FunctionExpression('buzz', new AttributePath('bar'))) - ) - const attributes = new ExpressionAttributes(); - - expect( - nestedFunction.serialize(attributes) - ).toBe('foo(#attr0, :val1, fizz(buzz(#attr0)))'); - - expect(attributes.names).toEqual({ - '#attr0': 'bar', - }); - - expect(attributes.values).toEqual({ - ':val1': {S: 'baz'}, - }); - }); - }); -}); diff --git a/packages/dynamodb-expressions/src/FunctionExpression.ts b/packages/dynamodb-expressions/src/FunctionExpression.ts deleted file mode 100644 index b8a8a58d..00000000 --- a/packages/dynamodb-expressions/src/FunctionExpression.ts +++ /dev/null @@ -1,43 +0,0 @@ -import {AttributeBearingExpression} from "./AttributeBearingExpression"; -import {AttributePath} from "./AttributePath"; -import {ExpressionAttributes} from "./ExpressionAttributes"; - -const FUNCTION_EXPRESSION_TAG = 'AmazonDynamoDbFunctionExpression'; -const EXPECTED_TOSTRING = `[object ${FUNCTION_EXPRESSION_TAG}]`; - -/** - * An object representing a DynamoDB function expression. 
- */ -export class FunctionExpression implements AttributeBearingExpression { - readonly [Symbol.toStringTag] = FUNCTION_EXPRESSION_TAG; - readonly args: Array; - - constructor( - readonly name: string, - ...args: Array - ) { - this.args = args; - } - - serialize(attributes: ExpressionAttributes) { - const expressionSafeArgs: Array = this.args.map(arg => { - if (AttributePath.isAttributePath(arg)) { - return attributes.addName(arg); - } else if (FunctionExpression.isFunctionExpression(arg)) { - return arg.serialize(attributes); - } - - return attributes.addValue(arg); - }); - - return `${this.name}(${expressionSafeArgs.join(', ')})`; - } - - /** - * Evaluate whether the provided value is a FunctionExpression object. - */ - static isFunctionExpression(arg: any): arg is FunctionExpression { - return arg instanceof FunctionExpression - || Object.prototype.toString.call(arg) === EXPECTED_TOSTRING; - } -} diff --git a/packages/dynamodb-expressions/src/MathematicalExpression.spec.ts b/packages/dynamodb-expressions/src/MathematicalExpression.spec.ts deleted file mode 100644 index 441a625c..00000000 --- a/packages/dynamodb-expressions/src/MathematicalExpression.spec.ts +++ /dev/null @@ -1,86 +0,0 @@ -import { AttributePath } from './AttributePath'; -import { ExpressionAttributes } from './ExpressionAttributes'; -import { FunctionExpression } from './FunctionExpression'; -import { MathematicalExpression } from './MathematicalExpression'; -import { - ExpressionAttributeNameMap, - ExpressionAttributeValueMap -} from 'aws-sdk/clients/dynamodb'; - -describe('MathematicalExpression', () => { - const validExpressions: Array<[ - MathematicalExpression, - string, - ExpressionAttributeNameMap, - ExpressionAttributeValueMap - ]> = [ - [ - new MathematicalExpression(new AttributePath('foo'), '+', 1), - '#attr0 + :val1', - { '#attr0': 'foo' }, - { ':val1': {N: '1'} }, - ], - [ - new MathematicalExpression( - new FunctionExpression( - 'if_not_exists', - new AttributePath('current_id'), - 0 - ), - '+', - 1 - ), - 'if_not_exists(#attr0, :val1) + :val2', - { '#attr0': 'current_id' }, - { - ':val1': {N: '0'}, - ':val2': {N: '1'}, - }, - ] - ]; - - describe('::isMathematicalExpression', () => { - it('should accept valid mathematical expressions', () => { - for (const [expr, _1, _2, _3] of validExpressions) { - expect(MathematicalExpression.isMathematicalExpression(expr)) - .toBe(true); - } - }); - - it('should reject non-matching values', () => { - for (const notMathematicalExpression of [ - false, - true, - null, - void 0, - 'string', - 123, - [], - {}, - new Uint8Array(12), - {foo: 'bar'}, - ]) { - expect( - MathematicalExpression - .isMathematicalExpression(notMathematicalExpression) - ).toBe(false); - } - }); - }); - - describe('#serialize', () => { - it('should serialize basic mathematical expressions', () => { - for (const [ - expression, - serialized, - expectedNames, - expectedValues, - ] of validExpressions) { - const attributes = new ExpressionAttributes(); - expect(expression.serialize(attributes)).toBe(serialized); - expect(attributes.names).toEqual(expectedNames); - expect(attributes.values).toEqual(expectedValues); - } - }); - }); -}); diff --git a/packages/dynamodb-expressions/src/MathematicalExpression.ts b/packages/dynamodb-expressions/src/MathematicalExpression.ts deleted file mode 100644 index 06a15108..00000000 --- a/packages/dynamodb-expressions/src/MathematicalExpression.ts +++ /dev/null @@ -1,44 +0,0 @@ -import {AttributeBearingExpression} from "./AttributeBearingExpression"; -import 
{AttributePath} from "./AttributePath"; -import {ExpressionAttributes} from "./ExpressionAttributes"; -import {FunctionExpression} from "./FunctionExpression"; - -export type MathematicalExpressionOperand = AttributePath|FunctionExpression|string|number; - -const MATHEMATICAL_EXPRESSION_TAG = 'AmazonDynamoDbMathematicalExpression'; -const EXPECTED_TOSTRING = `[object ${MATHEMATICAL_EXPRESSION_TAG}]`; - -/** - * An object representing a DynamoDB function expression. - */ -export class MathematicalExpression implements AttributeBearingExpression { - readonly [Symbol.toStringTag] = MATHEMATICAL_EXPRESSION_TAG; - - constructor( - readonly lhs: MathematicalExpressionOperand, - readonly operator: '+'|'-', - readonly rhs: MathematicalExpressionOperand - ) {} - - serialize(attributes: ExpressionAttributes) { - const safeArgs = [this.lhs, this.rhs].map( - arg => { - if (FunctionExpression.isFunctionExpression(arg)) { - return arg.serialize(attributes); - } - return AttributePath.isAttributePath(arg) || typeof arg === 'string' - ? attributes.addName(arg) - : attributes.addValue(arg); - } - ); - return `${safeArgs[0]} ${this.operator} ${safeArgs[1]}`; - } - - /** - * Evaluate whether the provided value is a MathematicalExpression object. - */ - static isMathematicalExpression(arg: any): arg is MathematicalExpression { - return arg instanceof MathematicalExpression - || Object.prototype.toString.call(arg) === EXPECTED_TOSTRING; - } -} diff --git a/packages/dynamodb-expressions/src/ProjectionExpression.spec.ts b/packages/dynamodb-expressions/src/ProjectionExpression.spec.ts deleted file mode 100644 index 798ac4df..00000000 --- a/packages/dynamodb-expressions/src/ProjectionExpression.spec.ts +++ /dev/null @@ -1,66 +0,0 @@ -import {AttributePath} from "./AttributePath"; -import {ExpressionAttributes} from "./ExpressionAttributes"; -import { - ProjectionExpression, - serializeProjectionExpression -} from './ProjectionExpression'; - -describe('ProjectionExpression', () => { - it('should allow the addition of scalar values', () => { - const attributes = new ExpressionAttributes(); - - expect(serializeProjectionExpression( - ['foo', 'bar', 'baz', 'quux'], - attributes - )).toBe('#attr0, #attr1, #attr2, #attr3'); - expect(attributes.names).toEqual({ - '#attr0': 'foo', - '#attr1': 'bar', - '#attr2': 'baz', - '#attr3': 'quux', - }); - }); - - it('should allow the addition of list index dereferences', () => { - const attributes = new ExpressionAttributes(); - - expect(serializeProjectionExpression( - [new AttributePath('foo[2]')], - attributes - )).toBe('#attr0[2]'); - expect(attributes.names).toEqual({ - '#attr0': 'foo', - }); - }); - - it('should allow the addition of nested attributes', () => { - const attributes = new ExpressionAttributes(); - - expect(serializeProjectionExpression( - [new AttributePath('foo.bar')], - attributes - )).toBe('#attr0.#attr1'); - expect(attributes.names).toEqual({ - '#attr0': 'foo', - '#attr1': 'bar', - }); - }); - - it( - 'should allow the nesting of complex attributes to an arbitrary depth', - () => { - const attributes = new ExpressionAttributes(); - expect(serializeProjectionExpression( - [new AttributePath('snap.foo[2].bar[3].baz[4].quux')], - attributes - )).toBe('#attr0.#attr1[2].#attr2[3].#attr3[4].#attr4'); - expect(attributes.names).toEqual({ - '#attr0': 'snap', - '#attr1': 'foo', - '#attr2': 'bar', - '#attr3': 'baz', - '#attr4': 'quux', - }); - } - ); -}); diff --git a/packages/dynamodb-expressions/src/ProjectionExpression.ts 
b/packages/dynamodb-expressions/src/ProjectionExpression.ts deleted file mode 100644 index 73f9ee51..00000000 --- a/packages/dynamodb-expressions/src/ProjectionExpression.ts +++ /dev/null @@ -1,25 +0,0 @@ -import {AttributePath} from "./AttributePath"; -import {ExpressionAttributes} from './ExpressionAttributes'; - -/** - * An array of attributes to project. Each may be represented as - * either an AttributePath object or as a string. - */ -export type ProjectionExpression = Array; - -/** - * Convert the supplied projection expression to the string format - * expected by DynamoDB, substituting all attribute paths using the - * provided ExpressionAttributes object. - */ -export function serializeProjectionExpression( - projection: ProjectionExpression, - attributes: ExpressionAttributes -): string { - const serialized: Array = []; - for (const projected of projection) { - serialized.push(attributes.addName(projected)); - } - - return serialized.join(', '); -} diff --git a/packages/dynamodb-expressions/src/UpdateExpression.spec.ts b/packages/dynamodb-expressions/src/UpdateExpression.spec.ts deleted file mode 100644 index 41c862b1..00000000 --- a/packages/dynamodb-expressions/src/UpdateExpression.spec.ts +++ /dev/null @@ -1,163 +0,0 @@ -import {UpdateExpression} from "./UpdateExpression"; -import {ExpressionAttributes} from "./ExpressionAttributes"; -import {AttributePath} from "./AttributePath"; -import {FunctionExpression} from "./FunctionExpression"; -import {MathematicalExpression} from "./MathematicalExpression"; -import {AttributeValue} from "./AttributeValue"; - -describe('UpdateExpression', () => { - it('should serialize ADD clauses', () => { - const attributes = new ExpressionAttributes(); - const expr = new UpdateExpression(); - expr.add('foo', new Set(['bar', 'baz'])); - expr.add('fizz', 1); - - expect(expr.serialize(attributes)) - .toBe('ADD #attr0 :val1, #attr2 :val3'); - expect(attributes.names).toEqual({ - '#attr0': 'foo', - '#attr2': 'fizz', - }); - expect(attributes.values).toEqual({ - ':val1': {SS: ['bar', 'baz']}, - ':val3': {N: '1'}, - }); - }); - - it('should serialize DELETE clauses', () => { - const attributes = new ExpressionAttributes(); - const expr = new UpdateExpression(); - expr.delete('foo', new Set(['bar', 'baz'])); - expr.delete('fizz', 1); - - expect(expr.serialize(attributes)) - .toBe('DELETE #attr0 :val1, #attr2 :val3'); - expect(attributes.names).toEqual({ - '#attr0': 'foo', - '#attr2': 'fizz', - }); - expect(attributes.values).toEqual({ - ':val1': {SS: ['bar', 'baz']}, - ':val3': {N: '1'}, - }); - }); - - it('should serialize REMOVE clauses', () => { - const attributes = new ExpressionAttributes(); - const expr = new UpdateExpression(); - expr.remove('foo'); - expr.remove('fizz'); - - expect(expr.serialize(attributes)).toBe('REMOVE #attr0, #attr1'); - expect(attributes.names).toEqual({ - '#attr0': 'foo', - '#attr1': 'fizz', - }); - expect(attributes.values).toEqual({}); - }); - - it('should serialize SET clauses', () => { - const attributes = new ExpressionAttributes(); - const expr = new UpdateExpression(); - expr.set('foo', new Set(['bar', 'baz'])); - expr.set('fizz', 1); - - expect(expr.serialize(attributes)) - .toBe('SET #attr0 = :val1, #attr2 = :val3'); - expect(attributes.names).toEqual({ - '#attr0': 'foo', - '#attr2': 'fizz', - }); - expect(attributes.values).toEqual({ - ':val1': {SS: ['bar', 'baz']}, - ':val3': {N: '1'}, - }); - }); - - it('should serialize SET clauses with function expressions', () => { - const attributes = new 
ExpressionAttributes(); - const expr = new UpdateExpression(); - expr.set('foo', new FunctionExpression( - 'list_append', - new AttributePath('foo'), - 'bar' - )); - - expect(expr.serialize(attributes)) - .toBe('SET #attr0 = list_append(#attr0, :val1)'); - expect(attributes.names).toEqual({ - '#attr0': 'foo', - }); - expect(attributes.values).toEqual({ - ':val1': {S: 'bar'}, - }); - }); - - it('should serialize SET clauses with mathematical expressions', () => { - const attributes = new ExpressionAttributes(); - const expr = new UpdateExpression(); - expr.set('foo', new MathematicalExpression( - new AttributePath('foo'), - '+', - 1 - )); - - expect(expr.serialize(attributes)).toBe('SET #attr0 = #attr0 + :val1'); - expect(attributes.names).toEqual({ - '#attr0': 'foo', - }); - expect(attributes.values).toEqual({ - ':val1': {N: '1'}, - }); - }); - - it('should serialize SET clauses with marshalled AttributeValues', () => { - const attributes = new ExpressionAttributes(); - const expr = new UpdateExpression(); - expr.set('foo', new AttributeValue({SS: ['bar', 'baz']})); - expr.set('fizz', new AttributeValue({N: '1'})); - - expect(expr.serialize(attributes)) - .toBe('SET #attr0 = :val1, #attr2 = :val3'); - expect(attributes.names).toEqual({ - '#attr0': 'foo', - '#attr2': 'fizz', - }); - expect(attributes.values).toEqual({ - ':val1': {SS: ['bar', 'baz']}, - ':val3': {N: '1'}, - }); - }); - - it('should serialize expressions with multiple clauses', () => { - const attributes = new ExpressionAttributes(); - const expr = new UpdateExpression(); - expr.add('foo', new Set(['baz'])); - expr.delete('foo', new Set(['quux'])); - expr.remove('fizz'); - expr.set('buzz', new Set(['pop'])); - - expect(expr.serialize(attributes)).toBe( - 'ADD #attr0 :val1 DELETE #attr0 :val2 SET #attr3 = :val4 REMOVE #attr5' - ); - expect(attributes.names).toEqual({ - '#attr0': 'foo', - '#attr3': 'buzz', - '#attr5': 'fizz', - }); - expect(attributes.values).toEqual({ - ':val1': {SS: ['baz']}, - ':val2': {SS: ['quux']}, - ':val4': {SS: ['pop']}, - }); - }); - - // it('should support injecting an instance of ExpressionAttributes', () => { - // const attributes = new ExpressionAttributes(); - // const expr = new UpdateExpression({attributes}); - // expr.remove('foo'); - // expr.serialize(attributes); - // - // expect(attributes.names).toEqual({'#attr0': 'foo'}); - // }); -}); diff --git a/packages/dynamodb-expressions/src/UpdateExpression.ts b/packages/dynamodb-expressions/src/UpdateExpression.ts deleted file mode 100644 index 63fc0100..00000000 --- a/packages/dynamodb-expressions/src/UpdateExpression.ts +++ /dev/null @@ -1,99 +0,0 @@ -import {AttributeBearingExpression} from "./AttributeBearingExpression"; -import {AttributePath} from "./AttributePath"; -import {AttributeValue} from './AttributeValue'; -import {ExpressionAttributes} from "./ExpressionAttributes"; -import {FunctionExpression} from "./FunctionExpression"; -import {MathematicalExpression} from "./MathematicalExpression"; - -/** - * An object representing a DynamoDB update expression. - */ -export class UpdateExpression implements AttributeBearingExpression { - readonly toAdd = new Map(); - readonly toDelete = new Map(); - readonly toRemove = new Set(); - readonly toSet = new Map(); - - /** - * Add a directive to the expression's `add` clause. - */ - add(path: AttributePath|string, value: any): void { - this.toAdd.set( - AttributePath.isAttributePath(path) ? 
path : new AttributePath(path), - value - ); - } - - /** - * Add a directive to the expression's `delete` clause. - */ - delete(path: AttributePath|string, value: any): void { - this.toDelete.set( - AttributePath.isAttributePath(path) ? path : new AttributePath(path), - value - ); - } - - /** - * Add a directive to the expression's `remove` clause. - */ - remove(path: AttributePath|string): void { - this.toRemove.add( - AttributePath.isAttributePath(path) ? path : new AttributePath(path) - ); - } - - /** - * Add a directive to the expression's `set` clause. - */ - set( - path: AttributePath|string, - value: AttributeValue|FunctionExpression|MathematicalExpression|any - ): void { - this.toSet.set( - AttributePath.isAttributePath(path) ? path : new AttributePath(path), - value - ); - } - - serialize(attributes: ExpressionAttributes): string { - const clauses: Array = []; - const phrases: Array = []; - for (const [mapping, verb] of [ - [this.toAdd, 'ADD'], - [this.toDelete, 'DELETE'], - ] as Array<[Map, string]>) { - for (const [key, value] of mapping.entries()) { - phrases.push( - `${attributes.addName(key)} ${attributes.addValue(value)}` - ); - } - - if (phrases.length > 0) { - clauses.push(`${verb} ${phrases.join(', ')}`); - phrases.length = 0; - } - } - - for (const [key, value] of this.toSet.entries()) { - phrases.push(`${attributes.addName(key)} = ${ - FunctionExpression.isFunctionExpression(value) || MathematicalExpression.isMathematicalExpression(value) - ? value.serialize(attributes) : attributes.addValue(value) - }`); - } - if (phrases.length > 0) { - clauses.push(`SET ${phrases.join(', ')}`); - phrases.length = 0; - } - - for (const keyToRemove of this.toRemove) { - phrases.push(attributes.addName(keyToRemove)); - } - if (phrases.length > 0) { - clauses.push(`REMOVE ${phrases.join(', ')}`); - phrases.length = 0; - } - - return clauses.join(' '); - } -} diff --git a/packages/dynamodb-expressions/src/index.ts b/packages/dynamodb-expressions/src/index.ts deleted file mode 100644 index 6fa75ff7..00000000 --- a/packages/dynamodb-expressions/src/index.ts +++ /dev/null @@ -1,8 +0,0 @@ -export * from './AttributePath'; -export * from './AttributeValue'; -export * from './ConditionExpression'; -export * from './ExpressionAttributes'; -export * from './FunctionExpression'; -export * from './MathematicalExpression'; -export * from './ProjectionExpression'; -export * from './UpdateExpression'; diff --git a/packages/dynamodb-expressions/tsconfig.json b/packages/dynamodb-expressions/tsconfig.json deleted file mode 100644 index 733ff25d..00000000 --- a/packages/dynamodb-expressions/tsconfig.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "compilerOptions": { - "target": "es5", - "lib": [ - "es5", - "es2015.iterable", - "es2015.promise", - "es2015.collection", - "es2015.symbol.wellknown" - ], - "downlevelIteration": true, - "importHelpers": true, - "module": "commonjs", - "strict": true, - "declaration": true, - "sourceMap": true, - "rootDir": "./src", - "outDir": "./build" - }, - "typedocOptions": { - "mode": "file", - "out": "../../docs/packages/dynamodb-expressions", - "excludeNotExported": true, - "excludePrivate": true, - "hideGenerator": true - } -} diff --git a/packages/dynamodb-expressions/tsconfig.test.json b/packages/dynamodb-expressions/tsconfig.test.json deleted file mode 100644 index 57f7d5b1..00000000 --- a/packages/dynamodb-expressions/tsconfig.test.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "sourceMap": false, - "inlineSourceMap": true, 
- "inlineSources": true, - "rootDir": "./src", - "outDir": "./build" - } -} diff --git a/packages/dynamodb-query-iterator/.npmignore b/packages/dynamodb-query-iterator/.npmignore deleted file mode 100644 index 1d116ecc..00000000 --- a/packages/dynamodb-query-iterator/.npmignore +++ /dev/null @@ -1,10 +0,0 @@ -/src -/node_modules -/coverage - -*.spec.d.ts -*.spec.js -*.spec.js.map - -tsconfig.json -tsconfig.test.json diff --git a/packages/dynamodb-query-iterator/CHANGELOG.md b/packages/dynamodb-query-iterator/CHANGELOG.md deleted file mode 100644 index 11dec0d0..00000000 --- a/packages/dynamodb-query-iterator/CHANGELOG.md +++ /dev/null @@ -1,14 +0,0 @@ -# Changelog -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) -and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). - -## [0.7.1] -Remove package rollup at `./build/index.mjs` due to bundler incompatibilities. - -## [0.7.0] -Add a package rollup at `./build/index.mjs` to support tree shaking. - -## [0.6.0] -Initial release diff --git a/packages/dynamodb-query-iterator/LICENSE b/packages/dynamodb-query-iterator/LICENSE deleted file mode 100644 index da05f5c9..00000000 --- a/packages/dynamodb-query-iterator/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2017 Amazon.com, Inc. or its affiliates - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/packages/dynamodb-query-iterator/README.md b/packages/dynamodb-query-iterator/README.md deleted file mode 100644 index 167771b4..00000000 --- a/packages/dynamodb-query-iterator/README.md +++ /dev/null @@ -1,357 +0,0 @@ -# Amazon DynamoDB Query and Scan Iteration - -[![Apache 2 License](https://img.shields.io/github/license/awslabs/dynamodb-data-mapper-js.svg?style=flat)](http://aws.amazon.com/apache-2-0/) - -This library provides utilities for automatically iterating over all DynamoDB -records returned by a query or scan operation using [async iterables](https://tc39.github.io/ecma262/#sec-asynciterable-interface). -Each iterator and paginator included in this package automatically tracks -DynamoDB metadata and supports resuming iteration from any point within a full -query or scan. - -## Paginators - -Paginators are asynchronous iterables that yield each page of results returned -by a DynamoDB `query` or `scan` operation. 
For sequential paginators, each
-invocation of the `next` method corresponds to an invocation of the underlying
-API operation until no more pages are available.
-
-### QueryPaginator
-
-Retrieves all pages of a DynamoDB `query` in order.
-
-#### Example usage
-
-```typescript
-import { QueryPaginator } from '@aws/dynamodb-query-iterator';
-import DynamoDB = require('aws-sdk/clients/dynamodb');
-
-const paginator = new QueryPaginator(
-    new DynamoDB({region: 'us-west-2'}),
-    {
-        TableName: 'my_table',
-        KeyConditionExpression: 'partitionKey = :value',
-        ExpressionAttributeValues: {
-            ':value': {S: 'foo'}
-        },
-        ReturnConsumedCapacity: 'INDEXES'
-    }
-);
-
-for await (const page of paginator) {
-    // do something with `page`
-}
-
-// Inspect the total number of items yielded
-console.log(paginator.count);
-
-// Inspect the total number of items scanned by this operation
-console.log(paginator.scannedCount);
-
-// Inspect the capacity consumed by this operation
-// This will only be available if `ReturnConsumedCapacity` was set on the input
-console.log(paginator.consumedCapacity);
-```
-
-#### Suspending and resuming queries
-
-You can suspend any running query from within the `for` loop by using the
-`break` keyword. If there are still pages that have not been fetched, the
-`lastEvaluatedKey` property of the paginator will be defined. This can be provided
-as the `ExclusiveStartKey` for another `QueryPaginator` instance:
-
-```typescript
-import { QueryPaginator } from '@aws/dynamodb-query-iterator';
-import { QueryInput } from 'aws-sdk/clients/dynamodb';
-import DynamoDB = require('aws-sdk/clients/dynamodb');
-
-const dynamoDb = new DynamoDB({region: 'us-west-2'});
-const input: QueryInput = {
-    TableName: 'my_table',
-    KeyConditionExpression: 'partitionKey = :value',
-    ExpressionAttributeValues: {
-        ':value': {S: 'foo'}
-    },
-    ReturnConsumedCapacity: 'INDEXES'
-};
-
-const paginator = new QueryPaginator(dynamoDb, input);
-
-for await (const page of paginator) {
-    // do something with the first page of results
-    break
-}
-
-for await (const page of new QueryPaginator(dynamoDb, {
-    ...input,
-    ExclusiveStartKey: paginator.lastEvaluatedKey
-})) {
-    // do something with the remaining pages
-}
-```
-
-Suspending and resuming the same paginator instance is not supported.
-
-### ScanPaginator
-
-Retrieves all pages of a DynamoDB `scan` in order.
-
-#### Example usage
-
-```typescript
-import { ScanPaginator } from '@aws/dynamodb-query-iterator';
-import DynamoDB = require('aws-sdk/clients/dynamodb');
-
-const paginator = new ScanPaginator(
-    new DynamoDB({region: 'us-west-2'}),
-    {
-        TableName: 'my_table',
-        ReturnConsumedCapacity: 'INDEXES'
-    }
-);
-
-for await (const page of paginator) {
-    // do something with `page`
-}
-
-// Inspect the total number of items yielded
-console.log(paginator.count);
-
-// Inspect the total number of items scanned by this operation
-console.log(paginator.scannedCount);
-
-// Inspect the capacity consumed by this operation
-// This will only be available if `ReturnConsumedCapacity` was set on the input
-console.log(paginator.consumedCapacity);
-```
-
-#### Suspending and resuming scans
-
-You can suspend any running scan from within the `for` loop by using the `break`
-keyword. If there are still pages that have not been fetched, the
-`lastEvaluatedKey` property of the paginator will be defined.
This can be provided
-as the `ExclusiveStartKey` for another `ScanPaginator` instance:
-
-```typescript
-import { ScanPaginator } from '@aws/dynamodb-query-iterator';
-import { ScanInput } from 'aws-sdk/clients/dynamodb';
-import DynamoDB = require('aws-sdk/clients/dynamodb');
-
-const dynamoDb = new DynamoDB({region: 'us-west-2'});
-const input: ScanInput = {
-    TableName: 'my_table',
-    ReturnConsumedCapacity: 'INDEXES'
-};
-
-const paginator = new ScanPaginator(dynamoDb, input);
-
-for await (const page of paginator) {
-    // do something with the first page of results
-    break
-}
-
-for await (const page of new ScanPaginator(dynamoDb, {
-    ...input,
-    ExclusiveStartKey: paginator.lastEvaluatedKey
-})) {
-    // do something with the remaining pages
-}
-```
-
-Suspending and resuming the same paginator instance is not supported.
-
-### ParallelScanPaginator
-
-Retrieves all pages of a DynamoDB `scan` utilizing a configurable number of scan
-segments that operate in parallel. When performing a parallel scan, you must
-specify the total number of segments you wish to use, and neither an
-`ExclusiveStartKey` nor a `Segment` identifier may be included with the input
-provided.
-
-#### Example usage
-
-```typescript
-import { ParallelScanPaginator } from '@aws/dynamodb-query-iterator';
-import DynamoDB = require('aws-sdk/clients/dynamodb');
-
-const paginator = new ParallelScanPaginator(
-    new DynamoDB({region: 'us-west-2'}),
-    {
-        TableName: 'my_table',
-        TotalSegments: 4,
-        ReturnConsumedCapacity: 'INDEXES'
-    }
-);
-
-for await (const page of paginator) {
-    // do something with `page`
-}
-
-// Inspect the total number of items yielded
-console.log(paginator.count);
-
-// Inspect the total number of items scanned by this operation
-console.log(paginator.scannedCount);
-
-// Inspect the capacity consumed by this operation
-// This will only be available if `ReturnConsumedCapacity` was set on the input
-console.log(paginator.consumedCapacity);
-```
-
-#### Suspending and resuming parallel scans
-
-You can suspend any running scan from within the `for` loop by using the `break`
-keyword. If there are still pages that have not been fetched, the `scanState`
-property of the interrupted paginator can be provided to the constructor of another
-`ParallelScanPaginator` instance:
-
-```typescript
-import {
-    ParallelScanInput,
-    ParallelScanPaginator,
-} from '@aws/dynamodb-query-iterator';
-import DynamoDB = require('aws-sdk/clients/dynamodb');
-
-const client = new DynamoDB({region: 'us-west-2'});
-const input: ParallelScanInput = {
-    TableName: 'my_table',
-    TotalSegments: 4,
-    ReturnConsumedCapacity: 'INDEXES'
-};
-
-const paginator = new ParallelScanPaginator(client, input);
-
-for await (const page of paginator) {
-    // do something with the first page of results
-    break
-}
-
-for await (const page of new ParallelScanPaginator(
-    client,
-    input,
-    paginator.scanState
-)) {
-    // do something with the remaining pages
-}
-```
-
-Suspending and resuming the same paginator instance is not supported.
-
-
-## Iterators
-
-Iterators are asynchronous iterables that yield each record returned by a
-DynamoDB `query` or `scan` operation. Each invocation of the `next` method may
-invoke the underlying API operation until no more pages are available.
-
-### QueryIterator
-
-Retrieves all records of a DynamoDB `query` in order.
- -#### Example usage - -```typescript -import { QueryIterator } from '@aws/dynamodb-query-iterator'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -const iterator = new QueryIterator( - new DynamoDB({region: 'us-west-2'}), - { - TableName: 'my_table', - KeyConditionExpression: 'partitionKey = :value', - ExpressionAttributeValues: { - ':value': {S: 'foo'} - }, - ReturnConsumedCapacity: 'INDEXES' - }, - ['partitionKey'] -); - -for await (const record of iterator) { - // do something with `record` -} - -// Inspect the total number of items yielded -console.log(iterator.count); - -// Inspect the total number of items scanned by this operation -console.log(iterator.scannedCount); - -// Inspect the capacity consumed by this operation -// This will only be available if `ReturnConsumedCapacity` was set on the input -console.log(iterator.consumedCapacity); -``` - -### ScanIterator - -Retrieves all records of a DynamoDB `scan` in order. - -#### Example usage - -```typescript -import { ScanIterator } from '@aws/dynamodb-query-iterator'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -const iterator = new ScanIterator( - new DynamoDB({region: 'us-west-2'}), - { - TableName: 'my_table', - ReturnConsumedCapacity: 'INDEXES' - }, - ['partitionKey', 'sortKey'] -); - -for await (const record of iterator) { - // do something with `record` -} - -// Inspect the total number of items yielded -console.log(iterator.count); - -// Inspect the total number of items scanned by this operation -console.log(iterator.scannedCount); - -// Inspect the capacity consumed by this operation -// This will only be available if `ReturnConsumedCapacity` was set on the input -console.log(iterator.consumedCapacity); -``` - -### ParallelScanIterator - -Retrieves all pages of a DynamoDB `scan` utilizing a configurable number of scan -segments that operate in parallel. When performing a parallel scan, you must -specify the total number of segments you wish to use, and neither an -`ExclusiveStartKey` nor a `Segment` identifier may be included with the input -provided. 
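As with `ParallelScanPaginator`, the iterator accepts an optional scan state as its third constructor argument (its constructor appears in `ParallelScanIterator.ts` later in this patch), so a parallel scan suspended through a paginator can be resumed record by record. A minimal sketch of that hand-off, reusing the `my_table` input from the surrounding examples:

```typescript
import {
    ParallelScanInput,
    ParallelScanIterator,
    ParallelScanPaginator,
} from '@aws/dynamodb-query-iterator';
import DynamoDB = require('aws-sdk/clients/dynamodb');

const client = new DynamoDB({region: 'us-west-2'});
const input: ParallelScanInput = {
    TableName: 'my_table',
    TotalSegments: 4
};

// Consume the first available page, then suspend the parallel scan
const paginator = new ParallelScanPaginator(client, input);
for await (const page of paginator) {
    // do something with the first page of results
    break
}

// Resume record-by-record iteration from the captured scan state
for await (const record of new ParallelScanIterator(
    client,
    input,
    paginator.scanState
)) {
    // do something with the remaining records
}
```

The scan state records each segment's `LastEvaluatedKey`, so the resumed iterator picks up where the suspended paginator left off.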
- -#### Example usage - -```typescript -import { ParallelScanIterator} from '@aws/dynamodb-query-iterator'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -const iterator = new ParallelScanIterator( - new DynamoDB({region: 'us-west-2'}), - { - TableName: 'my_table', - TotalSegments: 4, - ReturnConsumedCapacity: 'INDEXES' - }, - ['partitionKey'] -); - -for await (const record of iterator) { - // do something with `record` -} - -// Inspect the total number of items yielded -console.log(iterator.count); - -// Inspect the total number of items scanned by this operation -console.log(iterator.scannedCount); - -// Inspect the capacity consumed by this operation -// This will only be available if `ReturnConsumedCapacity` was set on the input -console.log(iterator.consumedCapacity); -``` - - diff --git a/packages/dynamodb-query-iterator/package.json b/packages/dynamodb-query-iterator/package.json deleted file mode 100644 index 59c6d88b..00000000 --- a/packages/dynamodb-query-iterator/package.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "@aws/dynamodb-query-iterator", - "version": "0.7.1", - "description": "Abstraction for DynamoDB queries and scans that handles pagination and parallel worker coordination", - "keywords": [ - "aws", - "dynamodb" - ], - "repository": { - "type": "git", - "url": "git+https://github.com/awslabs/dynamodb-data-mapper-js.git" - }, - "bugs": { - "url": "https://github.com/awslabs/dynamodb-data-mapper-js/issues" - }, - "homepage": "https://awslabs.github.io/dynamodb-data-mapper-js/packages/dynamodb-scan-iterator/", - "main": "./build/index.js", - "types": "./build/index.d.ts", - "scripts": { - "docs": "typedoc src", - "prepublishOnly": "tsc", - "pretest": "tsc -p tsconfig.test.json", - "test": "jest \"build/(.+).spec.js\"" - }, - "author": { - "name": "AWS SDK for JavaScript Team", - "email": "aws-sdk-js@amazon.com" - }, - "license": "Apache-2.0", - "devDependencies": { - "@types/jest": "^24", - "@types/node": "^8.0.4", - "aws-sdk": "^2.7.0", - "jest": "^24", - "typedoc": "^0.14.0", - "typescript": "^3.4" - }, - "peerDependencies": { - "aws-sdk": "^2.7.0" - }, - "dependencies": { - "tslib": "^1.9" - } -} diff --git a/packages/dynamodb-query-iterator/src/DynamoDbPaginator.ts b/packages/dynamodb-query-iterator/src/DynamoDbPaginator.ts deleted file mode 100644 index 71061194..00000000 --- a/packages/dynamodb-query-iterator/src/DynamoDbPaginator.ts +++ /dev/null @@ -1,114 +0,0 @@ -import { DynamoDbPaginatorInterface } from './DynamoDbPaginatorInterface'; -import { DynamoDbResultsPage } from './DynamoDbResultsPage'; -import { mergeConsumedCapacities } from './mergeConsumedCapacities'; -import { ConsumedCapacity, Key } from 'aws-sdk/clients/dynamodb'; - -if (Symbol && !Symbol.asyncIterator) { - (Symbol as any).asyncIterator = Symbol.for("__@@asyncIterator__"); -} - -export abstract class DynamoDbPaginator implements DynamoDbPaginatorInterface { - private _consumedCapacity?: ConsumedCapacity; - private _count = 0; - private _lastKey?: Key; - private _scannedCount = 0; - private lastResolved: Promise> - = Promise.resolve(); - - protected constructor(private readonly limit?: number) {} - - /** - * @inheritDoc - */ - [Symbol.asyncIterator](): AsyncIterableIterator { - return this; - } - - /** - * @inheritDoc - */ - get consumedCapacity(): ConsumedCapacity|undefined { - return this._consumedCapacity; - } - - /** - * @inheritDoc - */ - get count(): number { - return this._count; - } - - /** - * Get the LastEvaluatedKey of the last result page 
yielded by this - * paginator or undefined if the scan has already been exhausted. - */ - get lastEvaluatedKey(): Key|undefined { - return this._lastKey; - } - - /** - * @inheritDoc - */ - next(): Promise> { - this.lastResolved = this.lastResolved.then(() => { - if (this.count >= (this.limit === undefined ? Infinity : this.limit)) { - return {done: true} as IteratorResult; - } - - return this.getNext().then(({done, value}) => { - if (value && !done) { - this._lastKey = value.LastEvaluatedKey; - this._count += (value.Items || []).length; - this._scannedCount += (value.ScannedCount || 0); - this._consumedCapacity = mergeConsumedCapacities( - this._consumedCapacity, - value.ConsumedCapacity - ); - } - - return { value, done }; - }) - } - ); - - return this.lastResolved; - } - - /** - * @inheritDoc - */ - return(): Promise> { - // Prevent any further use of this iterator - this.lastResolved = Promise.reject(new Error( - 'Iteration has been manually interrupted and may not be resumed' - )); - this.lastResolved.catch(() => {}); - - return Promise.resolve( - {done: true} as IteratorResult - ); - } - - /** - * @inheritDoc - */ - get scannedCount(): number { - return this._scannedCount; - } - - /** - * Perform the next iteration - */ - protected abstract getNext(): Promise>; - - protected getNextPageSize(requestedPageSize?: number): number|undefined { - if (this.limit === undefined) { - return requestedPageSize; - } - - return Math.min( - requestedPageSize === undefined ? Infinity : requestedPageSize, - this.limit - this.count - ); - } -} diff --git a/packages/dynamodb-query-iterator/src/DynamoDbPaginatorInterface.ts b/packages/dynamodb-query-iterator/src/DynamoDbPaginatorInterface.ts deleted file mode 100644 index ef92ef68..00000000 --- a/packages/dynamodb-query-iterator/src/DynamoDbPaginatorInterface.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { DynamoDbResultsPage } from './DynamoDbResultsPage'; -import { ConsumedCapacity } from 'aws-sdk/clients/dynamodb'; - -export interface DynamoDbPaginatorInterface extends - AsyncIterableIterator -{ - /** - * The capacity units consumed by the Scan operation. The data returned - * includes the total provisioned throughput consumed, along with statistics - * for the table and any indexes involved in the operation. ConsumedCapacity - * is only returned if the ReturnConsumedCapacity parameter was specified. - */ - readonly consumedCapacity: ConsumedCapacity|undefined; - - /** - * The number of items in the results yielded. - */ - readonly count: number; - - /** - * The number of items evaluated, before any ScanFilter is applied. A high - * scannedCount value with few, or no, Count results indicates an - * inefficient Scan operation. For more information, see Count and - * ScannedCount in the Amazon DynamoDB Developer Guide. - */ - readonly scannedCount: number; - - /** - * @inheritDoc - */ - return(): Promise>; -} diff --git a/packages/dynamodb-query-iterator/src/DynamoDbResultsPage.ts b/packages/dynamodb-query-iterator/src/DynamoDbResultsPage.ts deleted file mode 100644 index 550630c3..00000000 --- a/packages/dynamodb-query-iterator/src/DynamoDbResultsPage.ts +++ /dev/null @@ -1,51 +0,0 @@ -import { - AttributeMap, - ConsumedCapacity, - Key, -} from "aws-sdk/clients/dynamodb"; - -export interface DynamoDbResultsPage { - /** - * An array of retrieved items. - */ - Items?: Array; - - /** - * The number of items in the response. 
If you used a filter in the request, - * then Count is the number of items returned after the filter was applied, - * and ScannedCount is the number of matching items before the filter was - * applied. If you did not use a filter in the request, then Count and - * ScannedCount are the same. - */ - Count?: number; - - /** - * The number of items evaluated, before any filter is applied. A high - * ScannedCount value with few, or no, Count results indicates an - * inefficient operation. For more information, see Count and ScannedCount - * in the Amazon DynamoDB Developer Guide. If you did not use a filter in - * the request, then ScannedCount is the same as Count. - */ - ScannedCount?: number; - - /** - * The primary key of the item where the operation stopped, inclusive of the - * previous result set. Use this value to start a new operation, excluding - * this value in the new request. If LastEvaluatedKey is empty, then the - * "last page" of results has been processed and there is no more data to be - * retrieved. If LastEvaluatedKey is not empty, it does not necessarily mean - * that there is more data in the result set. The only way to know when you - * have reached the end of the result set is when LastEvaluatedKey is empty. - */ - LastEvaluatedKey?: Key; - - /** - * The capacity units consumed by the operation. The data returned includes - * the total provisioned throughput consumed, along with statistics for the - * table and any indexes involved in the operation. ConsumedCapacity is only - * returned if the ReturnConsumedCapacity parameter was specified For more - * information, see Provisioned Throughput in the Amazon DynamoDB Developer - * Guide. - */ - ConsumedCapacity?: ConsumedCapacity; -} diff --git a/packages/dynamodb-query-iterator/src/ItemIterator.ts b/packages/dynamodb-query-iterator/src/ItemIterator.ts deleted file mode 100644 index 85fbccc9..00000000 --- a/packages/dynamodb-query-iterator/src/ItemIterator.ts +++ /dev/null @@ -1,117 +0,0 @@ -import { DynamoDbPaginatorInterface } from './DynamoDbPaginatorInterface'; -import { AttributeMap, ConsumedCapacity } from 'aws-sdk/clients/dynamodb'; - -if (Symbol && !Symbol.asyncIterator) { - (Symbol as any).asyncIterator = Symbol.for("__@@asyncIterator__"); -} - -export abstract class ItemIterator< - Paginator extends DynamoDbPaginatorInterface -> implements AsyncIterableIterator { - - private _iteratedCount = 0; - private lastResolved: Promise> = Promise.resolve(); - private readonly pending: Array = []; - - protected constructor(private readonly paginator: Paginator) {} - - /** - * @inheritDoc - */ - [Symbol.asyncIterator](): AsyncIterableIterator { - return this; - } - - /** - * The capacity units consumed by the Scan operation. The data returned - * includes the total provisioned throughput consumed, along with statistics - * for the table and any indexes involved in the operation. ConsumedCapacity - * is only returned if the ReturnConsumedCapacity parameter was specified. - */ - get consumedCapacity(): ConsumedCapacity|undefined { - return this.paginator.consumedCapacity; - } - - /** - * The number of items that have been iterated over. - */ - get count(): number { - return this._iteratedCount; - } - - /** - * @inheritDoc - */ - next(): Promise> { - this.lastResolved = this.lastResolved.then(() => this.getNext()); - return this.lastResolved; - } - - /** - * Detaches the underlying paginator from this iterator and returns it. 
The - * paginator will yield arrays of unmarshalled items, with each yielded - * array corresponding to a single call to the underlying API. As with the - * underlying API, pages may contain a variable number of items or no items, - * in which case an empty array will be yielded. - * - * Calling this method will disable further iteration. - */ - pages(): Paginator { - // Prevent the iterator from being used further and squelch any uncaught - // promise rejection warnings - this.lastResolved = Promise.reject(new Error( - 'The underlying paginator has been detached from this iterator.' - )); - this.lastResolved.catch(() => {}); - - return this.paginator; - } - - /** - * @inheritDoc - */ - return(): Promise> { - // Prevent any further use of this iterator - this.lastResolved = Promise.reject(new Error( - 'Iteration has been manually interrupted and may not be resumed' - )); - this.lastResolved.catch(() => {}); - - // Clear the pending queue to free up memory - this.pending.length = 0; - return this.paginator.return().then(doneSigil); - } - - /** - * The number of items evaluated, before any ScanFilter is applied. A high - * scannedCount value with few, or no, Count results indicates an - * inefficient Scan operation. For more information, see Count and - * ScannedCount in the Amazon DynamoDB Developer Guide. - */ - get scannedCount(): number { - return this.paginator.scannedCount; - } - - private getNext(): Promise> { - if (this.pending.length > 0) { - this._iteratedCount++; - return Promise.resolve({ - value: this.pending.shift()!, - done: false - }); - } - - return this.paginator.next().then(({done, value}) => { - if (done) { - return {done} as IteratorResult; - } - - this.pending.push(...value.Items || []); - return this.getNext(); - }); - } -} - -function doneSigil() { - return {done: true} as IteratorResult; -} diff --git a/packages/dynamodb-query-iterator/src/ParallelScanInput.ts b/packages/dynamodb-query-iterator/src/ParallelScanInput.ts deleted file mode 100644 index 3ceeb618..00000000 --- a/packages/dynamodb-query-iterator/src/ParallelScanInput.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { ScanInput } from 'aws-sdk/clients/dynamodb'; - -export interface ParallelScanInput extends ScanInput { - /** - * The exclusive start key for a particular scan segment must be coordinated - * across all active segments. To resume a previously suspending parallel - * scan, provide a `scanState` initializer when creating a - * ParallelScanPaginator. - */ - ExclusiveStartKey?: undefined; - - /** - * The segment identifier for each request will be assigned by the parallel - * scan orchestrator. - */ - Segment?: undefined; - - /** - * @inheritDoc - * - * `TotalSegments` **MUST** be specified when initializing or resuming a - * parallel scan. 
- */ - TotalSegments: number; -} diff --git a/packages/dynamodb-query-iterator/src/ParallelScanIterator.spec.ts b/packages/dynamodb-query-iterator/src/ParallelScanIterator.spec.ts deleted file mode 100644 index 9e13d4fb..00000000 --- a/packages/dynamodb-query-iterator/src/ParallelScanIterator.spec.ts +++ /dev/null @@ -1,186 +0,0 @@ -import { ParallelScanIterator } from '.'; - -describe('ParallelScanIterator', () => { - const promiseFunc = jest.fn(); - const mockDynamoDbClient = { - config: {}, - scan: jest.fn() - }; - - beforeEach(() => { - promiseFunc.mockClear(); - promiseFunc.mockImplementation(() => Promise.resolve({Items: []})); - mockDynamoDbClient.scan.mockClear(); - mockDynamoDbClient.scan.mockImplementation(() => { - return {promise: promiseFunc}; - }); - }); - - it( - 'should paginate over results and return a promise for each item', - async () => { - const segments = 2; - const keys = ['snap', 'crackle', 'pop', 'foo', 'bar', 'baz']; - let index = 0; - - // Ensure that the first promise won't resolve immediately. This - // would block progress on a sequential scan but should pose no - // problem for a parallel one. - promiseFunc.mockImplementationOnce(() => new Promise(resolve => { - setTimeout( - resolve.bind(null, { - Items: [ - { - fizz: {S: 'quux'}, - bar: {NS: ['5', '12', '13']}, - baz: {L: [{BOOL: true}, {N: '101'}]}, - }, - ], - }), - 50, - ); - } - )); - - // Enqueue a number of responses that will resolve synchronously - for (const key of keys) { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: key}, - bar: {NS: [ - (++index).toString(10), - (++index).toString(10), - ]}, - baz: {L: [ - {BOOL: index % 2 === 0}, - {N: (++index).toString(10)} - ]}, - }, - ], - LastEvaluatedKey: {fizz: {S: key}}, - })); - } - - // Enqueue a final page for this segment - promiseFunc.mockImplementationOnce(() => Promise.resolve({Items: []})); - - const result: Array = []; - for await (const res of new ParallelScanIterator( - mockDynamoDbClient as any, - { - TableName: 'foo', - TotalSegments: segments, - } - )) { - result.push(res); - } - - expect(result).toEqual([ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2']}, - baz: {L: [{BOOL: true}, {N: '3'}]} - }, - { - fizz: {S: 'crackle'}, - bar: {NS: ['4', '5']}, - baz: {L: [{BOOL: false}, {N: '6'}]} - }, - { - fizz: {S: 'pop'}, - bar: {NS: ['7', '8']}, - baz: {L: [{BOOL: true}, {N: '9'}]} - }, - { - fizz: {S: 'foo'}, - bar: {NS: ['10', '11']}, - baz: {L: [{BOOL: false}, {N: '12'}]} - }, - { - fizz: {S: 'bar'}, - bar: {NS: ['13', '14']}, - baz: {L: [{BOOL: true}, {N: '15'}]} - }, - { - fizz: {S: 'baz'}, - bar: {NS: ['16', '17']}, - baz: {L: [{BOOL: false}, {N: '18'}]} - }, - { - fizz: {S: 'quux'}, - bar: {NS: ['5', '12', '13']}, - baz: {L: [{BOOL: true}, {N: '101'}]} - }, - ]); - } - ); - - it('should provide access to paginator metadata', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - Count: 1, - ScannedCount:1, - ConsumedCapacity: { - TableName: 'foo', - CapacityUnits: 2 - } - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - Count: 1, - ScannedCount: 2, - ConsumedCapacity: { - TableName: 'foo', - CapacityUnits: 2 - } - })); - 
promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ], - Count: 1, - ScannedCount: 3, - ConsumedCapacity: { - TableName: 'foo', - CapacityUnits: 2 - } - })); - - promiseFunc.mockImplementationOnce(() => Promise.resolve({})); - - const iterator = new ParallelScanIterator( - mockDynamoDbClient as any, - {TableName: 'foo', TotalSegments: 2} - ); - - for await (const _ of iterator) { - // pass - } - - expect(iterator.count).toBe(3); - expect(iterator.scannedCount).toBe(6); - expect(iterator.consumedCapacity).toEqual({ - TableName: 'foo', - CapacityUnits: 6 - }); - }); -}); diff --git a/packages/dynamodb-query-iterator/src/ParallelScanIterator.ts b/packages/dynamodb-query-iterator/src/ParallelScanIterator.ts deleted file mode 100644 index 89af7845..00000000 --- a/packages/dynamodb-query-iterator/src/ParallelScanIterator.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { ItemIterator } from './ItemIterator'; -import { ParallelScanInput } from './ParallelScanInput'; -import { - ParallelScanPaginator, - ParallelScanState, -} from './ParallelScanPaginator'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -export class ParallelScanIterator extends ItemIterator { - constructor( - client: DynamoDB, - input: ParallelScanInput, - scanState?: ParallelScanState - ) { - super(new ParallelScanPaginator(client, input, scanState)); - } -} diff --git a/packages/dynamodb-query-iterator/src/ParallelScanPaginator.spec.ts b/packages/dynamodb-query-iterator/src/ParallelScanPaginator.spec.ts deleted file mode 100644 index 49c28310..00000000 --- a/packages/dynamodb-query-iterator/src/ParallelScanPaginator.spec.ts +++ /dev/null @@ -1,390 +0,0 @@ -import { ParallelScanPaginator } from '.'; - -describe('ParallelScanPaginator', () => { - const promiseFunc = jest.fn(); - const mockDynamoDbClient = { - config: {}, - scan: jest.fn() - }; - - beforeEach(() => { - promiseFunc.mockClear(); - promiseFunc.mockImplementation(() => Promise.resolve({Items: []})); - mockDynamoDbClient.scan.mockClear(); - mockDynamoDbClient.scan.mockImplementation(() => { - return {promise: promiseFunc}; - }); - }); - - it( - 'should execute multiple requests in parallel when performing a scan with multiple segments', - async () => { - const segments = 2; - const keys = ['snap', 'crackle', 'pop', 'foo', 'bar', 'baz']; - let index = 0; - - // Ensure that the first promise won't resolve immediately. This - // would block progress on a sequential scan but should pose no - // problem for a parallel one. 
- promiseFunc.mockImplementationOnce(() => new Promise(resolve => { - setTimeout( - resolve.bind(null, { - Items: [ - { - fizz: {S: 'quux'}, - bar: {NS: ['5', '12', '13']}, - baz: {L: [{BOOL: true}, {N: '101'}]}, - }, - ], - }), - 50, - ); - } - )); - - // Enqueue a number of responses that will resolve synchronously - for (const key of keys) { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: key}, - bar: {NS: [ - (++index).toString(10), - (++index).toString(10), - ]}, - baz: {L: [ - {BOOL: index % 2 === 0}, - {N: (++index).toString(10)} - ]}, - }, - ], - LastEvaluatedKey: {fizz: {S: key}}, - })); - } - - // Enqueue a final page for this segment - promiseFunc.mockImplementationOnce(() => Promise.resolve({Items: []})); - - const result: Array = []; - for await (const res of new ParallelScanPaginator( - mockDynamoDbClient as any, - { - TableName: 'foo', - TotalSegments: segments, - }, - )) { - result.push(res); - } - - expect(result).toEqual([ - { - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2']}, - baz: {L: [{BOOL: true}, {N: '3'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}} - }, - { - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['4', '5']}, - baz: {L: [{BOOL: false}, {N: '6'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}} - }, - { - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['7', '8']}, - baz: {L: [{BOOL: true}, {N: '9'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'pop'}} - }, - { - Items: [ - { - fizz: {S: 'foo'}, - bar: {NS: ['10', '11']}, - baz: {L: [{BOOL: false}, {N: '12'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'foo'}} - }, - { - Items: [ - { - fizz: {S: 'bar'}, - bar: {NS: ['13', '14']}, - baz: {L: [{BOOL: true}, {N: '15'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'bar'}} - }, - { - Items: [ - { - fizz: {S: 'baz'}, - bar: {NS: ['16', '17']}, - baz: {L: [{BOOL: false}, {N: '18'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'baz'}} - }, - { - Items: [] - }, - { - Items: [ - { - fizz: {S: 'quux'}, - bar: {NS: ['5', '12', '13']}, - baz: {L: [{BOOL: true}, {N: '101'}]} - }, - ] - }, - ]); - } - ); - - it('should merge counts', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - Count: 1, - ScannedCount:1 - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - Count: 1, - ScannedCount: 2, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ], - Count: 1, - ScannedCount: 3 - })); - - const paginator = new ParallelScanPaginator( - mockDynamoDbClient as any, - { - TableName: 'foo', - TotalSegments: 2 - } - ); - - for await (const _ of paginator) { - // pass - } - - expect(paginator.count).toBe(3); - expect(paginator.scannedCount).toBe(6); - }); - - it('should merge consumed capacity reports', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - ConsumedCapacity: { - TableName: 'foo', - CapacityUnits: 2 - } - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ 
- Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - ConsumedCapacity: { - TableName: 'foo', - CapacityUnits: 2 - } - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ], - ConsumedCapacity: { - TableName: 'foo', - CapacityUnits: 2 - } - })); - - const paginator = new ParallelScanPaginator( - mockDynamoDbClient as any, - { - TableName: 'foo', - TotalSegments: 2 - } - ); - - for await (const _ of paginator) { - // pass - } - expect(paginator.consumedCapacity).toEqual({ - TableName: 'foo', - CapacityUnits: 6 - }); - }); - - it( - 'should report the scan state even after ceasing iteration', - async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - })); - - const paginator = new ParallelScanPaginator( - mockDynamoDbClient as any, - { - TableName: 'foo', - TotalSegments: 1 - } - ); - - for await (const _ of paginator) { - break; - } - - expect(paginator.scanState).toEqual([{ - initialized: true, - LastEvaluatedKey: {fizz: {S: 'snap'}} - }]); - } - ); - - it('should resume pagination when given a state object', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - })); - - const paginator = new ParallelScanPaginator( - mockDynamoDbClient as any, - { - TableName: 'foo', - TotalSegments: 1 - }, - [ - { - initialized: true, - LastEvaluatedKey: {fizz: {S: 'snap'}} - } - ] - ); - - for await (const _ of paginator) { - break - } - - expect(mockDynamoDbClient.scan.mock.calls).toEqual([ - [ - { - TableName: 'foo', - ExclusiveStartKey: {fizz: {S: 'snap'}}, - Segment: 0, - TotalSegments: 1, - } - ] - ]); - }); - - it('should yield nothing when given a finished state object', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - })); - - const paginator = new ParallelScanPaginator( - mockDynamoDbClient as any, - { - TableName: 'foo', - TotalSegments: 1 - }, - [ { initialized: true } ] - ); - - for await (const _ of paginator) { - throw new Error('This block should never have been entered'); - } - }); - - it( - 'should throw when a parallel scan paginator is created with a scan state with the wrong number of segments', - () => { - expect(() => new ParallelScanPaginator( - mockDynamoDbClient as any, - { - TableName: 'foo', - TotalSegments: 1 - }, - [ - { - initialized: true, - LastEvaluatedKey: {fizz: {S: 'snap'}} - }, - { - initialized: true, - LastEvaluatedKey: {fizz: {S: 'crackle'}} - } - ] - )).toThrow(); - } - ); -}); diff --git a/packages/dynamodb-query-iterator/src/ParallelScanPaginator.ts b/packages/dynamodb-query-iterator/src/ParallelScanPaginator.ts deleted file mode 100644 index 898bbcab..00000000 --- a/packages/dynamodb-query-iterator/src/ParallelScanPaginator.ts +++ /dev/null @@ -1,237 +0,0 @@ -import { DynamoDbPaginatorInterface } from './DynamoDbPaginatorInterface'; -import { DynamoDbResultsPage } from './DynamoDbResultsPage'; -import { mergeConsumedCapacities } from './mergeConsumedCapacities'; -import { 
ParallelScanInput } from './ParallelScanInput'; -import { ScanPaginator } from './ScanPaginator'; -import { ConsumedCapacity, Key } from 'aws-sdk/clients/dynamodb'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -/** - * Pagination state for a scan segment for which the first page has not yet been - * retrieved. - */ -export interface UninitializedScanState { - initialized: false; - LastEvaluatedKey?: undefined; -} - -/** - * Pagination state for a scan segment for which one or more pages have been - * retrieved. If `LastEvaluatedKey` is defined, there are more pages to fetch; - * otherwise, all pages for this segment have been returned. - */ -export interface InitializedScanState { - initialized: true; - LastEvaluatedKey?: Key; -} - -export type ScanState = UninitializedScanState|InitializedScanState; - -/** - * ParallelScanState is represented as an array whose length is equal to the - * number of segments being scanned independently, with each segment's state - * being stored at the array index corresponding to its segment number. - * - * Segment state is represented with a tagged union with the following keys: - * - `initialized` -- whether the first page of results has been retrieved - * - `LastEvaluatedKey` -- the key to provide (if any) when requesting the - * next page of results. - * - * If `LastEvaluatedKey` is undefined and `initialized` is true, then all pages - * for the given segment have been returned. - */ -export type ParallelScanState = Array; - -if (Symbol && !Symbol.asyncIterator) { - (Symbol as any).asyncIterator = Symbol.for("__@@asyncIterator__"); -} - -export class ParallelScanPaginator implements DynamoDbPaginatorInterface { - private readonly _scanState: ParallelScanState; - private readonly iterators: Array; - private readonly pending: Array = []; - private lastResolved: Promise< - IteratorResult - > = Promise.resolve() as any; - - constructor( - client: DynamoDB, - input: ParallelScanInput, - scanState: ParallelScanState = nullScanState(input.TotalSegments) - ) { - const { TotalSegments } = input; - - if (scanState.length !== TotalSegments) { - throw new Error( - `Parallel scan state must have a length equal to the number of ` - + `scan segments. Expected an array of ${TotalSegments} but` - + `received an array with ${scanState.length} elements.` - ); - } - - this.iterators = new Array(TotalSegments); - for (let i = 0; i < TotalSegments; i++) { - const iterator = new ScanPaginator( - client, - { - ...input, - Segment: i, - ExclusiveStartKey: scanState[i].LastEvaluatedKey, - } - ); - this.iterators[i] = iterator; - - // If the segment has not been initialized or a pagination token has - // been received, request the next page. 
- if (!scanState[i].initialized || scanState[i].LastEvaluatedKey) { - this.refillPending(iterator, i); - } - } - - this._scanState = [...scanState]; - } - - /** - * @inheritDoc - */ - [Symbol.asyncIterator](): AsyncIterableIterator { - return this; - } - - /** - * @inheritDoc - */ - get consumedCapacity(): ConsumedCapacity|undefined { - return this.iterators.reduce( - (merged: ConsumedCapacity|undefined, paginator) => mergeConsumedCapacities( - merged, - paginator.consumedCapacity - ), - undefined - ) - } - - /** - * @inheritDoc - */ - get count(): number { - return this.iterators.reduce( - (sum, paginator) => sum + paginator.count, - 0 - ); - } - - /** - * @inheritDoc - */ - next(): Promise> { - this.lastResolved = this.lastResolved.then(() => this.getNext()); - return this.lastResolved; - } - - private async getNext(): Promise> { - if (this.pending.length === 0) { - return doneSigil(); - } - - // Grab the next available result from any segment. - const { - iterator, - result: {value, done}, - segment, - } = await Promise.race(this.pending.map(pending => pending.result)); - - // Update the scan state for this segment. This will either be the last - // evaluated key (for an unfinished segment) or undefined (for a - // completed segment). - this._scanState[segment] = { - initialized: true, - LastEvaluatedKey: value && value.LastEvaluatedKey, - }; - - // Remove the result from the pending set. - for (let i = this.pending.length - 1; i >= 0; i--) { - if (this.pending[i].iterator === iterator) { - this.pending.splice(i, 1); - } - } - - // If the iterator is not finished, add its next result to the pending - // set. - if (!done) { - this.refillPending(iterator, segment); - return { value, done }; - } else { - // If a segment has finished but there are still outstanding - // requests, recur. A done sigil will be returned when the pending - // queue is empty. - return this.getNext(); - } - } - - /** - * @inheritDoc - */ - async return(): Promise> { - this.pending.length = 0; - return Promise.all(this.iterators.map(iterator => iterator.return())) - .then(doneSigil); - } - - /** - * @inheritDoc - */ - get scannedCount(): number { - return this.iterators.reduce( - (sum, paginator) => sum + paginator.scannedCount, - 0 - ); - } - - /** - * A snapshot of the current state of a parallel scan. May be used to resume - * a parallel scan with a separate paginator. - */ - get scanState(): ParallelScanState { - return [...this._scanState]; - } - - private refillPending(iterator: ScanPaginator, segment: number): void { - // Use .push to reorder segments within the array of pending results. - // Promise.race will iterate over the array of pending results until a - // resolved promise is found and therefore will naturally favor promises - // towards the head of the queue. Removing resolved segments and sending - // them to the back of the line will keep this implementation detail - // from creating hot and cold scan segments. 
- this.pending.push({ - iterator: iterator, - result: iterator.next() - .then(result => ({iterator, result, segment})), - }); - } -} - -function doneSigil() { - return {done: true} as IteratorResult; -} - -/** - * `Array.prototype.fill` is not available in IE, so a loop is used instead - */ -function nullScanState(length: number): ParallelScanState { - const target: ParallelScanState = new Array(length); - for (let i = 0; i < length; i++) { - target[i] = {initialized: false}; - } - - return target; -} - -interface PendingResult { - iterator: ScanPaginator; - result: Promise<{ - iterator: ScanPaginator; - result: IteratorResult; - segment: number; - }>; -} diff --git a/packages/dynamodb-query-iterator/src/QueryIterator.spec.ts b/packages/dynamodb-query-iterator/src/QueryIterator.spec.ts deleted file mode 100644 index bfa7ff61..00000000 --- a/packages/dynamodb-query-iterator/src/QueryIterator.spec.ts +++ /dev/null @@ -1,185 +0,0 @@ -import {QueryIterator, QueryPaginator} from '.'; - -describe('QueryIterator', () => { - const promiseFunc = jest.fn(); - const mockDynamoDbClient = { - config: {}, - query: jest.fn() - }; - - beforeEach(() => { - promiseFunc.mockClear(); - promiseFunc.mockImplementation(() => Promise.resolve({Items: []})); - mockDynamoDbClient.query.mockClear(); - mockDynamoDbClient.query.mockImplementation(() => { - return {promise: promiseFunc}; - }); - }); - - it( - 'should paginate over results and return a promise for each item', - async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'pop'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({})); - - const result: any[] = []; - for await (const item of new QueryIterator(mockDynamoDbClient as any, {TableName: 'foo'})) { - result.push(item); - } - - expect(result).toEqual([ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ]); - } - ); - - it('should provide access to the underlying paginator', async () => { - const iterator = new QueryIterator(mockDynamoDbClient as any, {TableName: 'foo'}); - - expect(iterator.pages()).toBeInstanceOf(QueryPaginator); - }); - - it('should not allow iteration once the paginator has been detached', async () => { - const iterator = new QueryIterator(mockDynamoDbClient as any, {TableName: 'foo'}); - - // detach the paginator - iterator.pages(); - - await expect(iterator.next()).rejects.toMatchObject(new Error( - 'The underlying paginator has been detached from this iterator.' 
- )); - }); - - it('should provide access to paginator metadata', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - Count: 1, - ScannedCount:1, - ConsumedCapacity: { - TableName: 'foo', - CapacityUnits: 2 - } - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - Count: 1, - ScannedCount: 2, - ConsumedCapacity: { - TableName: 'foo', - CapacityUnits: 2 - } - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ], - Count: 1, - ScannedCount: 3, - ConsumedCapacity: { - TableName: 'foo', - CapacityUnits: 2 - } - })); - - const iterator = new QueryIterator(mockDynamoDbClient as any, {TableName: 'foo'}); - - let expectedCount = 0; - const expectedScanCounts = [1, 3, 6]; - expect(iterator.count).toBe(expectedCount); - expect(iterator.scannedCount).toBe(expectedCount); - for await (const _ of iterator) { - expect(iterator.count).toBe(++expectedCount); - expect(iterator.scannedCount).toBe(expectedScanCounts.shift()); - } - - expect(iterator.count).toBe(3); - expect(iterator.scannedCount).toBe(6); - expect(iterator.consumedCapacity).toEqual({ - TableName: 'foo', - CapacityUnits: 6 - }); - }); - - it('should not allow iteration once return has been called', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - })); - const iterator = new QueryIterator(mockDynamoDbClient as any, {TableName: 'foo'}); - - for await (const _ of iterator) { - break - } - - await expect(iterator.next()).rejects.toMatchObject(new Error( - 'Iteration has been manually interrupted and may not be resumed' - )); - }); -}); diff --git a/packages/dynamodb-query-iterator/src/QueryIterator.ts b/packages/dynamodb-query-iterator/src/QueryIterator.ts deleted file mode 100644 index fade8a04..00000000 --- a/packages/dynamodb-query-iterator/src/QueryIterator.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { ItemIterator } from './ItemIterator'; -import { QueryPaginator } from './QueryPaginator'; -import { QueryInput } from 'aws-sdk/clients/dynamodb'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -export class QueryIterator extends ItemIterator { - constructor(client: DynamoDB, input: QueryInput, limit?: number) { - super(new QueryPaginator(client, input, limit)); - } -} diff --git a/packages/dynamodb-query-iterator/src/QueryPaginator.spec.ts b/packages/dynamodb-query-iterator/src/QueryPaginator.spec.ts deleted file mode 100644 index 20235eef..00000000 --- a/packages/dynamodb-query-iterator/src/QueryPaginator.spec.ts +++ /dev/null @@ -1,350 +0,0 @@ -import { QueryPaginator } from '.'; - -describe('QueryPaginator', () => { - const promiseFunc = jest.fn(); - const mockDynamoDbClient = { - config: {}, - query: jest.fn() - }; - - beforeEach(() => { - promiseFunc.mockClear(); - promiseFunc.mockImplementation(() => Promise.resolve({Items: []})); - mockDynamoDbClient.query.mockClear(); - mockDynamoDbClient.query.mockImplementation(() => { - return {promise: promiseFunc}; - }); - }); - - it( - 
'should paginate over results and return a promise for each item', - async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'pop'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({})); - - const result: any[] = []; - for await (const res of new QueryPaginator(mockDynamoDbClient as any, {TableName: 'foo'})) { - result.push(...res.Items || []); - } - - expect(result).toEqual([ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ]); - } - ); - - it('should fetch up to $limit records', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - })); - - const paginator = new QueryPaginator(mockDynamoDbClient as any, {TableName: 'foo'}, 2); - const result: any[] = []; - for await (const res of paginator) { - result.push(...res.Items || []); - } - - expect(result).toEqual([ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - } - ]); - - expect(paginator.lastEvaluatedKey).toEqual({fizz: {S: 'crackle'}}); - }); - - it('should not request a page size that will exceed $limit', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({})); - - const paginator = new QueryPaginator(mockDynamoDbClient as any, {TableName: 'foo'}, 3); - for await (const _ of paginator) { - // pass - } - - expect(mockDynamoDbClient.query.mock.calls).toEqual([ - [{TableName: 'foo', Limit: 3}], - [{ - TableName: 'foo', - Limit: 1, - ExclusiveStartKey: {fizz: {S: 'crackle'}} - }], - ]); - }); - - it('should provide access to the last evaluated key', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - })); - 
promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'pop'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({})); - - const paginator = new QueryPaginator(mockDynamoDbClient as any, {TableName: 'foo'}); - const expectedLastKeys = [ - {fizz: {S: 'snap'}}, - {fizz: {S: 'crackle'}}, - {fizz: {S: 'pop'}}, - ]; - - for await (const _ of paginator) { - expect(paginator.lastEvaluatedKey).toEqual(expectedLastKeys.shift()); - } - - expect(paginator.lastEvaluatedKey).toBeUndefined(); - }); - - it('should merge counts', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - Count: 1, - ScannedCount:1 - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - Count: 1, - ScannedCount: 2, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ], - Count: 1, - ScannedCount: 3 - })); - - const paginator = new QueryPaginator(mockDynamoDbClient as any, {TableName: 'foo'}); - - let expectedCount = 0; - const expectedScanCounts = [1, 3, 6]; - expect(paginator.count).toBe(expectedCount); - expect(paginator.scannedCount).toBe(expectedCount); - for await (const _ of paginator) { - expect(paginator.count).toBe(++expectedCount); - expect(paginator.scannedCount).toBe(expectedScanCounts.shift()); - } - - expect(paginator.count).toBe(3); - expect(paginator.scannedCount).toBe(6); - }); - - it('should merge consumed capacity reports', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - ConsumedCapacity: { - TableName: 'foo', - CapacityUnits: 2 - } - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - ConsumedCapacity: { - TableName: 'foo', - CapacityUnits: 2 - } - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ], - ConsumedCapacity: { - TableName: 'foo', - CapacityUnits: 2 - } - })); - - const paginator = new QueryPaginator(mockDynamoDbClient as any, {TableName: 'foo'}); - - for await (const _ of paginator) { - // pass - } - expect(paginator.consumedCapacity).toEqual({ - TableName: 'foo', - CapacityUnits: 6 - }); - }); - - it( - 'should report the last evaluated key even after ceasing iteration', - async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - 
LastEvaluatedKey: {fizz: {S: 'crackle'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'pop'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({})); - - let i = 0; - const paginator = new QueryPaginator(mockDynamoDbClient as any, {TableName: 'foo'}); - for await (const _ of paginator) { - if (++i > 1) { - break; - } - } - - expect(paginator.lastEvaluatedKey).toEqual({fizz: {S: 'crackle'}}); - } - ); -}); diff --git a/packages/dynamodb-query-iterator/src/QueryPaginator.ts b/packages/dynamodb-query-iterator/src/QueryPaginator.ts deleted file mode 100644 index ac73cd3a..00000000 --- a/packages/dynamodb-query-iterator/src/QueryPaginator.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { DynamoDbPaginator } from './DynamoDbPaginator'; -import { DynamoDbResultsPage } from './DynamoDbResultsPage'; -import { QueryInput } from 'aws-sdk/clients/dynamodb'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -export class QueryPaginator extends DynamoDbPaginator { - private nextRequest?: QueryInput; - - constructor( - private readonly client: DynamoDB, - input: QueryInput, - limit?: number - ) { - super(limit); - this.nextRequest = {...input}; - } - - protected getNext(): Promise> { - if (this.nextRequest) { - return this.client.query({ - ...this.nextRequest, - Limit: this.getNextPageSize(this.nextRequest.Limit) - }) - .promise() - .then(output => { - if (this.nextRequest && output.LastEvaluatedKey) { - this.nextRequest = { - ...this.nextRequest, - ExclusiveStartKey: output.LastEvaluatedKey - }; - } else { - this.nextRequest = undefined; - } - - return Promise.resolve({ - value: output, - done: false - }); - }); - } - - return Promise.resolve( - {done: true} as IteratorResult - ); - } -} diff --git a/packages/dynamodb-query-iterator/src/ScanIterator.spec.ts b/packages/dynamodb-query-iterator/src/ScanIterator.spec.ts deleted file mode 100644 index b9759f3f..00000000 --- a/packages/dynamodb-query-iterator/src/ScanIterator.spec.ts +++ /dev/null @@ -1,146 +0,0 @@ -import { ScanIterator } from '.'; - -describe('ScanIterator', () => { - const promiseFunc = jest.fn(); - const mockDynamoDbClient = { - config: {}, - scan: jest.fn() - }; - - beforeEach(() => { - promiseFunc.mockClear(); - promiseFunc.mockImplementation(() => Promise.resolve({Items: []})); - mockDynamoDbClient.scan.mockClear(); - mockDynamoDbClient.scan.mockImplementation(() => { - return {promise: promiseFunc}; - }); - }); - - it( - 'should paginate over results and return a promise for each item', - async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'pop'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({})); - - const result: any[] = []; - for await (const item of new ScanIterator(mockDynamoDbClient as any, {TableName: 'foo'})) { 
- result.push(item); - } - - expect(result).toEqual([ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ]); - } - ); - - it('should provide access to paginator metadata', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - Count: 1, - ScannedCount:1, - ConsumedCapacity: { - TableName: 'foo', - CapacityUnits: 2 - } - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - Count: 1, - ScannedCount: 2, - ConsumedCapacity: { - TableName: 'foo', - CapacityUnits: 2 - } - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ], - Count: 1, - ScannedCount: 3, - ConsumedCapacity: { - TableName: 'foo', - CapacityUnits: 2 - } - })); - - const iterator = new ScanIterator(mockDynamoDbClient as any, {TableName: 'foo'}); - - let expectedCount = 0; - const expectedScanCounts = [1, 3, 6]; - expect(iterator.count).toBe(expectedCount); - expect(iterator.scannedCount).toBe(expectedCount); - for await (const _ of iterator) { - expect(iterator.count).toBe(++expectedCount); - expect(iterator.scannedCount).toBe(expectedScanCounts.shift()); - } - - expect(iterator.count).toBe(3); - expect(iterator.scannedCount).toBe(6); - expect(iterator.consumedCapacity).toEqual({ - TableName: 'foo', - CapacityUnits: 6 - }); - }); -}); diff --git a/packages/dynamodb-query-iterator/src/ScanIterator.ts b/packages/dynamodb-query-iterator/src/ScanIterator.ts deleted file mode 100644 index 335c6800..00000000 --- a/packages/dynamodb-query-iterator/src/ScanIterator.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { ItemIterator } from './ItemIterator'; -import { ScanPaginator } from './ScanPaginator'; -import { ScanInput } from 'aws-sdk/clients/dynamodb'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -export class ScanIterator extends ItemIterator { - constructor(client: DynamoDB, input: ScanInput, limit?: number) { - super(new ScanPaginator(client, input, limit)); - } -} diff --git a/packages/dynamodb-query-iterator/src/ScanPaginator.spec.ts b/packages/dynamodb-query-iterator/src/ScanPaginator.spec.ts deleted file mode 100644 index 5e7a9acc..00000000 --- a/packages/dynamodb-query-iterator/src/ScanPaginator.spec.ts +++ /dev/null @@ -1,278 +0,0 @@ -import { ScanPaginator } from '.'; - -describe('ScanPaginator', () => { - const promiseFunc = jest.fn(); - const mockDynamoDbClient = { - config: {}, - scan: jest.fn() - }; - - beforeEach(() => { - promiseFunc.mockClear(); - promiseFunc.mockImplementation(() => Promise.resolve({Items: []})); - mockDynamoDbClient.scan.mockClear(); - mockDynamoDbClient.scan.mockImplementation(() => { - return {promise: promiseFunc}; - }); - }); - - it( - 'should paginate over results and return a promise for each item', - async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} 
- }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'pop'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({})); - - const result: any[] = []; - for await (const res of new ScanPaginator(mockDynamoDbClient as any, {TableName: 'foo'})) { - result.push(...res.Items || []); - } - - expect(result).toEqual([ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ]); - } - ); - - it('should provide access to the last evaluated key', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'pop'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({})); - - const paginator = new ScanPaginator(mockDynamoDbClient as any, {TableName: 'foo'}); - const expectedLastKeys = [ - {fizz: {S: 'snap'}}, - {fizz: {S: 'crackle'}}, - {fizz: {S: 'pop'}}, - ]; - - for await (const _ of paginator) { - expect(paginator.lastEvaluatedKey).toEqual(expectedLastKeys.shift()); - } - - expect(paginator.lastEvaluatedKey).toBeUndefined(); - }); - - it('should merge counts', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - Count: 1, - ScannedCount:1 - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - Count: 1, - ScannedCount: 2, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ], - Count: 1, - ScannedCount: 3 - })); - - const paginator = new ScanPaginator(mockDynamoDbClient as any, {TableName: 'foo'}); - - let expectedCount = 0 - const expectedScanCounts = [1, 3, 6]; - expect(paginator.count).toBe(expectedCount); - expect(paginator.scannedCount).toBe(expectedCount); - for await (const _ of paginator) { - expect(paginator.count).toBe(++expectedCount); - expect(paginator.scannedCount).toBe(expectedScanCounts.shift()!); - } - - expect(paginator.count).toBe(3); - expect(paginator.scannedCount).toBe(6); - }); - - it('should merge 
consumed capacity reports', async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - ConsumedCapacity: { - TableName: 'foo', - CapacityUnits: 2 - } - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - ConsumedCapacity: { - TableName: 'foo', - CapacityUnits: 2 - } - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ], - ConsumedCapacity: { - TableName: 'foo', - CapacityUnits: 2 - } - })); - - const paginator = new ScanPaginator(mockDynamoDbClient as any, {TableName: 'foo'}); - - for await (const _ of paginator) { - // pass - } - expect(paginator.consumedCapacity).toEqual({ - TableName: 'foo', - CapacityUnits: 6 - }); - }); - - it( - 'should report the last evaluated key even after ceasing iteration', - async () => { - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'snap'}, - bar: {NS: ['1', '2', '3']}, - baz: {L: [{BOOL: true}, {N: '4'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'snap'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'crackle'}, - bar: {NS: ['5', '6', '7']}, - baz: {L: [{BOOL: false}, {N: '8'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'crackle'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({ - Items: [ - { - fizz: {S: 'pop'}, - bar: {NS: ['9', '12', '30']}, - baz: {L: [{BOOL: true}, {N: '24'}]} - }, - ], - LastEvaluatedKey: {fizz: {S: 'pop'}}, - })); - promiseFunc.mockImplementationOnce(() => Promise.resolve({})); - - let i = 0; - const paginator = new ScanPaginator(mockDynamoDbClient as any, {TableName: 'foo'}); - for await (const _ of paginator) { - if (++i > 1) { - break; - } - } - - expect(paginator.lastEvaluatedKey).toEqual({fizz: {S: 'crackle'}}); - } - ); -}); diff --git a/packages/dynamodb-query-iterator/src/ScanPaginator.ts b/packages/dynamodb-query-iterator/src/ScanPaginator.ts deleted file mode 100644 index e46b659c..00000000 --- a/packages/dynamodb-query-iterator/src/ScanPaginator.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { DynamoDbPaginator } from './DynamoDbPaginator'; -import { DynamoDbResultsPage } from './DynamoDbResultsPage'; -import { ScanInput } from 'aws-sdk/clients/dynamodb'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); - -export class ScanPaginator extends DynamoDbPaginator { - private nextRequest?: ScanInput; - - constructor( - private readonly client: DynamoDB, - input: ScanInput, - limit?: number - ) { - super(limit); - this.nextRequest = { - ...input, - Limit: this.getNextPageSize(input.Limit), - }; - } - - protected getNext(): Promise> { - if (this.nextRequest) { - return this.client.scan({ - ...this.nextRequest, - Limit: this.getNextPageSize(this.nextRequest.Limit) - }) - .promise() - .then(output => { - if (this.nextRequest && output.LastEvaluatedKey) { - this.nextRequest = { - ...this.nextRequest, - ExclusiveStartKey: output.LastEvaluatedKey - }; - } else { - this.nextRequest = undefined; - } - - return Promise.resolve({ - value: output, - done: false - }); - }); - } - - return Promise.resolve( - {done: true} as IteratorResult - ); - } -} diff --git 
a/packages/dynamodb-query-iterator/src/index.ts b/packages/dynamodb-query-iterator/src/index.ts deleted file mode 100644 index 8b4b09f0..00000000 --- a/packages/dynamodb-query-iterator/src/index.ts +++ /dev/null @@ -1,8 +0,0 @@ -export * from './DynamoDbPaginatorInterface'; -export * from './ParallelScanInput'; -export * from './ParallelScanIterator'; -export * from './ParallelScanPaginator'; -export * from './QueryIterator'; -export * from './QueryPaginator'; -export * from './ScanIterator'; -export * from './ScanPaginator'; diff --git a/packages/dynamodb-query-iterator/src/mergeConsumedCapacities.spec.ts b/packages/dynamodb-query-iterator/src/mergeConsumedCapacities.spec.ts deleted file mode 100644 index a78b288f..00000000 --- a/packages/dynamodb-query-iterator/src/mergeConsumedCapacities.spec.ts +++ /dev/null @@ -1,151 +0,0 @@ -import { mergeConsumedCapacities } from './mergeConsumedCapacities'; -import { ConsumedCapacity } from 'aws-sdk/clients/dynamodb'; - -describe('mergeConsumedCapacities', () => { - it('should return undefined when called two undefined arguments', () => { - expect(mergeConsumedCapacities(void 0, void 0)).toBeUndefined(); - }); - - it('should throw when called with capacities from two different tables', () => { - expect( - () => mergeConsumedCapacities({TableName: 'foo'}, {TableName: 'bar'}) - ).toThrow(); - }); - - it( - 'should return a clone of the first argument when the second is undefined', - () => { - const capacity: ConsumedCapacity = { - TableName: 'foo', - CapacityUnits: 2, - Table: { - CapacityUnits: 4, - }, - LocalSecondaryIndexes: { - foo: { - CapacityUnits: 6 - } - }, - GlobalSecondaryIndexes: { - bar: { - CapacityUnits: 8 - } - } - }; - const merged = mergeConsumedCapacities(capacity, void 0); - expect(merged).toEqual(capacity); - expect(merged).not.toBe(capacity); - } - ); - - it( - 'should return a clone of the second argument when the first is undefined', - () => { - const capacity: ConsumedCapacity = { - TableName: 'foo', - CapacityUnits: 2, - Table: { - CapacityUnits: 4, - }, - LocalSecondaryIndexes: { - foo: { - CapacityUnits: 6 - } - }, - GlobalSecondaryIndexes: { - bar: { - CapacityUnits: 8 - } - } - }; - const merged = mergeConsumedCapacities(void 0, capacity); - expect(merged).toEqual(capacity); - expect(merged).not.toBe(capacity); - } - ); - - it( - 'should return a clone of the first argument when the second is undefined', - () => { - const a: ConsumedCapacity = { - TableName: 'foo', - CapacityUnits: 2, - Table: { - CapacityUnits: 4, - }, - LocalSecondaryIndexes: { - foo: { - CapacityUnits: 6 - }, - fizz: { - CapacityUnits: 2 - } - }, - GlobalSecondaryIndexes: { - bar: { - CapacityUnits: 8 - }, - buzz: { - CapacityUnits: 2 - } - } - }; - const b: ConsumedCapacity = { - TableName: 'foo', - CapacityUnits: 2, - Table: { - CapacityUnits: 4, - }, - LocalSecondaryIndexes: { - foo: { - CapacityUnits: 6 - }, - snap: { - CapacityUnits: 2 - } - }, - GlobalSecondaryIndexes: { - bar: { - CapacityUnits: 8 - }, - crackle: { - CapacityUnits: 2 - } - } - }; - - expect(mergeConsumedCapacities(a, b)).toEqual({ - TableName: 'foo', - CapacityUnits: a.CapacityUnits! + b.CapacityUnits!, - Table: { - CapacityUnits: a.Table!.CapacityUnits! + b.Table!.CapacityUnits!, - }, - LocalSecondaryIndexes: { - foo: { - CapacityUnits: a.LocalSecondaryIndexes!.foo.CapacityUnits! - + b.LocalSecondaryIndexes!.foo.CapacityUnits! 
- }, - fizz: { - CapacityUnits: a.LocalSecondaryIndexes!.fizz.CapacityUnits - }, - snap: { - CapacityUnits: b.LocalSecondaryIndexes!.snap.CapacityUnits - } - }, - GlobalSecondaryIndexes: { - bar: { - CapacityUnits: a.GlobalSecondaryIndexes!.bar.CapacityUnits! - + b.GlobalSecondaryIndexes!.bar.CapacityUnits! - }, - buzz: { - CapacityUnits: a.GlobalSecondaryIndexes!.buzz.CapacityUnits - }, - crackle: { - CapacityUnits: b.GlobalSecondaryIndexes!.crackle.CapacityUnits - } - } - - }); - } - ); -}); diff --git a/packages/dynamodb-query-iterator/src/mergeConsumedCapacities.ts b/packages/dynamodb-query-iterator/src/mergeConsumedCapacities.ts deleted file mode 100644 index 1a669168..00000000 --- a/packages/dynamodb-query-iterator/src/mergeConsumedCapacities.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { - Capacity, - ConsumedCapacity, - SecondaryIndexesCapacityMap, -} from 'aws-sdk/clients/dynamodb'; - -/** - * @internal - */ -export function mergeConsumedCapacities( - a?: ConsumedCapacity, - b?: ConsumedCapacity -): ConsumedCapacity|undefined { - if (a || b) { - a = a || {}; - b = b || {}; - - if ((a.TableName && b.TableName) && a.TableName !== b.TableName) { - throw new Error( - 'Consumed capacity reports may only be merged if they describe the same table' - ); - } - - return { - TableName: a.TableName || b.TableName, - CapacityUnits: (a.CapacityUnits || 0) + (b.CapacityUnits || 0), - Table: mergeCapacities(a.Table, b.Table), - LocalSecondaryIndexes: mergeCapacityMaps( - a.LocalSecondaryIndexes, - b.LocalSecondaryIndexes - ), - GlobalSecondaryIndexes: mergeCapacityMaps( - a.GlobalSecondaryIndexes, - b.GlobalSecondaryIndexes - ), - } - } -} - -function mergeCapacities(a?: Capacity, b?: Capacity): Capacity|undefined { - if (a || b) { - return { - CapacityUnits: ((a && a.CapacityUnits) || 0) + - ((b && b.CapacityUnits) || 0), - }; - } -} - -function mergeCapacityMaps( - a?: SecondaryIndexesCapacityMap, - b?: SecondaryIndexesCapacityMap -): SecondaryIndexesCapacityMap|undefined { - if (a || b) { - const out: SecondaryIndexesCapacityMap = {}; - - a = a || {}; - b = b || {}; - const keys = new Set(); - for (const map of [a, b]) { - for (const indexName of Object.keys(map)) { - keys.add(indexName); - } - } - - for (const key of keys) { - out[key] = mergeCapacities(a[key], b[key])!; - } - - return out; - } -} diff --git a/packages/dynamodb-query-iterator/tsconfig.json b/packages/dynamodb-query-iterator/tsconfig.json deleted file mode 100644 index 05d818f8..00000000 --- a/packages/dynamodb-query-iterator/tsconfig.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "compilerOptions": { - "target": "es5", - "lib": [ - "es5", - "es2015.iterable", - "es2015.promise", - "es2015.collection", - "es2015.symbol.wellknown", - "esnext.asynciterable" - ], - "downlevelIteration": true, - "importHelpers": true, - "module": "commonjs", - "noUnusedLocals": true, - "strict": true, - "declaration": true, - "sourceMap": true, - "rootDir": "./src", - "outDir": "./build" - }, - "typedocOptions": { - "mode": "file", - "out": "../../docs/packages/dynamodb-query-iterator", - "excludeNotExported": true, - "excludePrivate": true, - "hideGenerator": true - } -} diff --git a/packages/dynamodb-query-iterator/tsconfig.test.json b/packages/dynamodb-query-iterator/tsconfig.test.json deleted file mode 100644 index 57f7d5b1..00000000 --- a/packages/dynamodb-query-iterator/tsconfig.test.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "sourceMap": false, - "inlineSourceMap": true, - "inlineSources": true, - 
"rootDir": "./src", - "outDir": "./build" - } -} diff --git a/packages/dynamodb-batch-iterator/src/BatchGet.spec.ts b/src/BatchGet.spec.ts similarity index 100% rename from packages/dynamodb-batch-iterator/src/BatchGet.spec.ts rename to src/BatchGet.spec.ts diff --git a/packages/dynamodb-batch-iterator/src/BatchGet.ts b/src/BatchGet.ts similarity index 100% rename from packages/dynamodb-batch-iterator/src/BatchGet.ts rename to src/BatchGet.ts diff --git a/packages/dynamodb-batch-iterator/src/BatchGetOptions.ts b/src/BatchGetOptions.ts similarity index 100% rename from packages/dynamodb-batch-iterator/src/BatchGetOptions.ts rename to src/BatchGetOptions.ts diff --git a/src/BatchOperation.ts b/src/BatchOperation.ts new file mode 100644 index 00000000..470ec578 --- /dev/null +++ b/src/BatchOperation.ts @@ -0,0 +1,243 @@ +import { + BatchState, + SyncOrAsyncIterable, + TableState, + TableStateElement, + ThrottledTableConfiguration, +} from "./types"; +import DynamoDB = require("aws-sdk/clients/dynamodb"); + +if (Symbol && !Symbol.asyncIterator) { + (Symbol as any).asyncIterator = Symbol.for("__@@asyncIterator__"); +} + +export abstract class BatchOperation + implements AsyncIterableIterator<[string, Element]> +{ + /** + * The maximum number of elements that may be included in a single batch. + */ + protected abstract readonly batchSize: number; + + /** + * Items that have been retrieved and are ready to be returned. + */ + protected readonly pending: Array<[string, Element]> = []; + + /** + * A mapping of table names to table-specific operation state (e.g., the + * number of throttling events experienced, etc.) + */ + protected readonly state: BatchState = {}; + + /** + * Input elements that are prepared for immediate dispatch + */ + protected readonly toSend: Array<[string, Element]> = []; + + private readonly throttled = new Set< + Promise> + >(); + private readonly iterator: + | Iterator<[string, Element]> + | AsyncIterator<[string, Element]>; + private sourceDone: boolean = false; + private sourceNext: + | IteratorResult<[string, Element]> + | Promise>; + private lastResolved?: Promise>; + + /** + * @param client The AWS SDK client with which to communicate with + * DynamoDB. + * @param items A synchronous or asynchronous iterable of tuples + * describing the operations to execute. The first member + * of the tuple should be the name of the table targeted by + * the operation. + */ + constructor( + protected readonly client: DynamoDB, + items: SyncOrAsyncIterable<[string, Element]> + ) { + if (isIterable(items)) { + this.iterator = items[Symbol.iterator](); + } else { + this.iterator = items[Symbol.asyncIterator](); + } + this.sourceNext = this.iterator.next(); + } + + next(): Promise> { + if (this.lastResolved) { + this.lastResolved = this.lastResolved.then(() => this.getNext()); + } else { + this.lastResolved = this.getNext(); + } + + return this.lastResolved; + } + + [Symbol.asyncIterator]() { + return this; + } + + /** + * Execute a single batch request and process the result. + */ + protected abstract doBatchRequest(): Promise; + + /** + * Create and return the initial state object for a given DynamoDB table. + * + * @param tableName The name of the table whose initial state should be + * returned. 
+ */ + protected getInitialTableState(tableName: string): TableState { + return { + backoffFactor: 0, + name: tableName, + }; + } + + /** + * Accept an array of unprocessed items belonging to a single table and + * re-enqueue it for submission, making sure the appropriate level of + * backoff is applied to future operations on the same table. + * + * @param tableName The table to which the unprocessed elements belong. + * @param unprocessed Elements returned by DynamoDB as not yet processed. + * The elements should not be unmarshalled, but they + * should be reverted to the form used for elements + * that have not yet been sent. + */ + protected handleThrottled( + tableName: string, + unprocessed: Array + ): void { + const tableState = this.state[tableName]; + tableState.backoffFactor++; + + if (tableState.tableThrottling) { + this.throttled.delete(tableState.tableThrottling.backoffWaiter); + unprocessed.unshift(...tableState.tableThrottling.unprocessed); + } + + tableState.tableThrottling = { + unprocessed, + backoffWaiter: new Promise((resolve) => { + setTimeout( + resolve, + exponentialBackoff(tableState.backoffFactor), + tableState + ); + }), + }; + + this.throttled.add(tableState.tableThrottling.backoffWaiter); + } + + /** + * Iterate over all pending writes and move those targeting throttled tables + * into the throttled queue. + * + * @param unprocessedTables A set of tables for which some items were + * returned without being processed. + */ + protected movePendingToThrottled(unprocessedTables: Set) { + for (let i = this.toSend.length - 1; i > -1; i--) { + const [table, attributes] = this.toSend[i]; + if (unprocessedTables.has(table)) { + ( + this.state[table] as ThrottledTableConfiguration + ).tableThrottling.unprocessed.push(attributes); + this.toSend.splice(i, 1); + } + } + } + + private addToSendQueue([tableName, attributes]: [string, Element]): void { + if (!this.state[tableName]) { + this.state[tableName] = this.getInitialTableState(tableName); + } + const tableState = this.state[tableName]; + + if (tableState.tableThrottling) { + tableState.tableThrottling.unprocessed.push(attributes); + } else { + this.toSend.push([tableName, attributes]); + } + } + + private enqueueThrottled(table: ThrottledTableConfiguration): void { + const { + tableThrottling: { backoffWaiter, unprocessed }, + } = table; + if (unprocessed.length > 0) { + this.toSend.push( + ...unprocessed.map((attr) => [table.name, attr] as [string, Element]) + ); + } + + this.throttled.delete(backoffWaiter); + delete table.tableThrottling; + } + + private async getNext(): Promise> { + if ( + this.sourceDone && + this.pending.length === 0 && + this.toSend.length === 0 && + this.throttled.size === 0 + ) { + return { done: true } as IteratorResult<[string, Element]>; + } + + if (this.pending.length > 0) { + return { + done: false, + value: this.pending.shift() as [string, Element], + }; + } + + await this.refillPending(); + return this.getNext(); + } + + private async refillPending() { + while (!this.sourceDone && this.toSend.length < this.batchSize) { + const toProcess = isIteratorResult(this.sourceNext) + ? 
this.sourceNext + : await Promise.race([this.sourceNext, Promise.race(this.throttled)]); + + if (isIteratorResult(toProcess)) { + this.sourceDone = toProcess.done; + if (!this.sourceDone) { + this.addToSendQueue(toProcess.value); + this.sourceNext = this.iterator.next(); + } + } else { + this.enqueueThrottled(toProcess); + } + } + + while (this.toSend.length < this.batchSize && this.throttled.size > 0) { + this.enqueueThrottled(await Promise.race(this.throttled)); + } + + if (this.toSend.length > 0) { + await this.doBatchRequest(); + } + } +} + +function exponentialBackoff(attempts: number) { + return Math.floor(Math.random() * Math.pow(2, attempts)); +} + +function isIterable(arg: any): arg is Iterable { + return Boolean(arg) && typeof arg[Symbol.iterator] === "function"; +} + +function isIteratorResult(arg: any): arg is IteratorResult { + return Boolean(arg) && typeof arg.done === "boolean"; +} diff --git a/packages/dynamodb-batch-iterator/src/BatchWrite.spec.ts b/src/BatchWrite.spec.ts similarity index 100% rename from packages/dynamodb-batch-iterator/src/BatchWrite.spec.ts rename to src/BatchWrite.spec.ts diff --git a/packages/dynamodb-batch-iterator/src/BatchWrite.ts b/src/BatchWrite.ts similarity index 100% rename from packages/dynamodb-batch-iterator/src/BatchWrite.ts rename to src/BatchWrite.ts diff --git a/src/index.ts b/src/index.ts index e69de29b..f067865d 100644 --- a/src/index.ts +++ b/src/index.ts @@ -0,0 +1,4 @@ +export * from './BatchGet'; +export * from './BatchGetOptions'; +export * from './BatchWrite'; +export * from './types'; diff --git a/packages/dynamodb-batch-iterator/src/itemIdentifier.spec.ts b/src/itemIdentifier.spec.ts similarity index 100% rename from packages/dynamodb-batch-iterator/src/itemIdentifier.spec.ts rename to src/itemIdentifier.spec.ts diff --git a/packages/dynamodb-batch-iterator/src/itemIdentifier.ts b/src/itemIdentifier.ts similarity index 100% rename from packages/dynamodb-batch-iterator/src/itemIdentifier.ts rename to src/itemIdentifier.ts diff --git a/src/types.ts b/src/types.ts new file mode 100644 index 00000000..04d83093 --- /dev/null +++ b/src/types.ts @@ -0,0 +1,68 @@ +import { + AttributeMap, + ConsistentRead, + DeleteRequest, + ExpressionAttributeNameMap, + ProjectionExpression, + PutRequest, + WriteRequest as DynamoDbWriteRequest, +} from "aws-sdk/clients/dynamodb"; + +/** + * A synchronous or asynchronous iterable. + */ +export type SyncOrAsyncIterable = Iterable | AsyncIterable; + +/** + * @internal + */ +export interface BatchState { + [tableName: string]: TableState; +} + +/** + * @internal + */ +export interface TableState { + attributeNames?: ExpressionAttributeNameMap; + backoffFactor: number; + consistentRead?: ConsistentRead; + name: string; + projection?: ProjectionExpression; + tableThrottling?: TableThrottlingTracker; +} + +/** + * @internal + */ +export type TableStateElement = AttributeMap | WriteRequest; + +/** + * @internal + */ +export interface TableThrottlingTracker { + backoffWaiter: Promise>; + unprocessed: Array; +} + +/** + * @internal + */ +export interface ThrottledTableConfiguration + extends TableState { + tableThrottling?: TableThrottlingTracker; +} + +/** + * A write request for which exactly one of the `PutRequest` and `DeleteRequest` + * properties has been defined. 
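Concretely, this union lets the compiler accept a put-only or delete-only element and reject one that tries to carry both. An illustrative pair of values (the attribute names are invented):

import { WriteRequest } from './types';

// Exactly one of the two request shapes may be present on each element.
const putOnly: WriteRequest = {
    PutRequest: { Item: { id: { S: 'widget-1' }, price: { N: '25' } } }
};
const deleteOnly: WriteRequest = {
    DeleteRequest: { Key: { id: { S: 'widget-2' } } }
};
// A value carrying both PutRequest and DeleteRequest satisfies neither branch
// of the union and is rejected at compile time.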
+ */ +export type WriteRequest = + | (DynamoDbWriteRequest & { + PutRequest: PutRequest; + DeleteRequest?: undefined; + }) + | (DynamoDbWriteRequest & { + DeleteRequest: DeleteRequest; + PutRequest?: undefined; + }); diff --git a/tsconfig.json b/tsconfig.json index ae045031..a8ccf890 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -2,18 +2,28 @@ "compilerOptions": { "target": "es5", "lib": [ - "dom", - "es5" + "es5", + "es2015.iterable", + "es2015.promise", + "es2015.collection", + "es2015.symbol.wellknown", + "esnext.asynciterable" ], + "downlevelIteration": true, + "importHelpers": true, "module": "commonjs", - "strict": true + "noUnusedLocals": true, + "strict": true, + "declaration": true, + "sourceMap": true, + "rootDir": "./src", + "outDir": "./build" }, "typedocOptions": { "mode": "file", "out": "./docs", "excludeNotExported": true, "excludePrivate": true, - "hideGenerator": true, - "name": "Amazon DynamoDB DataMapper For JavaScript" + "hideGenerator": true } } diff --git a/packages/dynamodb-batch-iterator/tsconfig.test.json b/tsconfig.test.json similarity index 100% rename from packages/dynamodb-batch-iterator/tsconfig.test.json rename to tsconfig.test.json From bc6be26d2faf056b96c44e07a432cd804faa7082 Mon Sep 17 00:00:00 2001 From: bas-d <7903735+bas-d@users.noreply.github.com> Date: Thu, 2 Dec 2021 18:27:22 -0600 Subject: [PATCH 02/17] Fix typescript errors --- package.json | 3 +-- src/BatchOperation.ts | 9 +++++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/package.json b/package.json index 2f0b7d2b..0fbd511d 100644 --- a/package.json +++ b/package.json @@ -46,8 +46,7 @@ "jest": { "testEnvironment": "node", "testPathIgnorePatterns": [ - "/node_modules/", - ".ts" + "/node_modules/" ] } } diff --git a/src/BatchOperation.ts b/src/BatchOperation.ts index 470ec578..c6b828c8 100644 --- a/src/BatchOperation.ts +++ b/src/BatchOperation.ts @@ -149,7 +149,7 @@ export abstract class BatchOperation if (unprocessedTables.has(table)) { ( this.state[table] as ThrottledTableConfiguration - ).tableThrottling.unprocessed.push(attributes); + ).tableThrottling?.unprocessed.push(attributes); this.toSend.splice(i, 1); } } @@ -169,6 +169,9 @@ export abstract class BatchOperation } private enqueueThrottled(table: ThrottledTableConfiguration): void { + if (table.tableThrottling == null) { + return; + } const { tableThrottling: { backoffWaiter, unprocessed }, } = table; @@ -210,7 +213,9 @@ export abstract class BatchOperation : await Promise.race([this.sourceNext, Promise.race(this.throttled)]); if (isIteratorResult(toProcess)) { - this.sourceDone = toProcess.done; + if (toProcess.done) { + this.sourceDone = true; + } if (!this.sourceDone) { this.addToSendQueue(toProcess.value); this.sourceNext = this.iterator.next(); From 0d8e960110e1aa9811b6c474e384402cb6fef5b9 Mon Sep 17 00:00:00 2001 From: bas-d <7903735+bas-d@users.noreply.github.com> Date: Fri, 3 Dec 2021 17:10:48 -0600 Subject: [PATCH 03/17] Add prettier. 
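This commit introduces a `.prettierrc` and a CommonJS `jest.config.js`, both shown below. For illustration only, the same Jest settings expressed as a typed `jest.config.ts` would look roughly like this (a sketch, not part of this patch; it assumes ts-jest is available):

import type { Config } from '@jest/types';

// Same settings as the jest.config.js added below, expressed with Jest's typings.
const config: Config.InitialOptions = {
    preset: 'ts-jest',
    testEnvironment: 'node'
};

export default config;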
--- .prettierrc | 15 ++++++++ jest.config.js | 5 +++ package.json | 95 ++++++++++++++++++++++++-------------------------- 3 files changed, 65 insertions(+), 50 deletions(-) create mode 100644 .prettierrc create mode 100644 jest.config.js diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 00000000..f621e8fe --- /dev/null +++ b/.prettierrc @@ -0,0 +1,15 @@ +{ + "printWidth": 120, + "trailingComma": "none", + "singleQuote": true, + "tabWidth": 4, + "proseWrap": "never", + "overrides": [ + { + "files": ["*.yml", "*.yaml"], + "options": { + "tabWidth": 2 + } + } + ] +} diff --git a/jest.config.js b/jest.config.js new file mode 100644 index 00000000..8cbf8940 --- /dev/null +++ b/jest.config.js @@ -0,0 +1,5 @@ +/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', +}; \ No newline at end of file diff --git a/package.json b/package.json index 0fbd511d..d38df7b8 100644 --- a/package.json +++ b/package.json @@ -1,52 +1,47 @@ { - "name": "@aws/dynamodb-batch-iterator", - "version": "0.7.1", - "description": "Abstraction for DynamoDB batch reads and writes for that handles batch splitting and partial retries with exponential backoff", - "keywords": [ - "aws", - "dynamodb" - ], - "repository": { - "type": "git", - "url": "git+https://github.com/awslabs/dynamodb-data-mapper-js.git" - }, - "bugs": { - "url": "https://github.com/awslabs/dynamodb-data-mapper-js/issues" - }, - "homepage": "https://awslabs.github.io/dynamodb-data-mapper-js/packages/dynamodb-batch-iterator/", - "main": "./build/index.js", - "types": "./build/index.d.ts", - "scripts": { - "docs": "typedoc src", - "prepublishOnly": "tsc", - "pretest": "tsc -p tsconfig.test.json", - "test": "jest \"build/(.+).spec.js\"" - }, - "author": { - "name": "AWS SDK for JavaScript Team", - "email": "aws-sdk-js@amazon.com" - }, - "license": "Apache-2.0", - "devDependencies": { - "@types/jest": "^27.0.3", - "@types/node": "^16.11.11", - "jest": "^27.4.3", - "prettier": "^2.5.0", - "typedoc": "^0.22.10", - "typescript": "^4.5.2" - }, - "peerDependencies": { - "aws-sdk": "^2.7.0" - }, - "dependencies": { - "aws-sdk": "^2.1042.0", - "tslib": "^2.3.1", - "utf8-bytes": "^0.0.1" - }, - "jest": { - "testEnvironment": "node", - "testPathIgnorePatterns": [ - "/node_modules/" - ] - } + "name": "@aws/dynamodb-batch-iterator", + "version": "0.7.1", + "description": "Abstraction for DynamoDB batch reads and writes for that handles batch splitting and partial retries with exponential backoff", + "keywords": [ + "aws", + "dynamodb" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/awslabs/dynamodb-data-mapper-js.git" + }, + "bugs": { + "url": "https://github.com/awslabs/dynamodb-data-mapper-js/issues" + }, + "homepage": "https://awslabs.github.io/dynamodb-data-mapper-js/packages/dynamodb-batch-iterator/", + "main": "./build/index.js", + "types": "./build/index.d.ts", + "scripts": { + "docs": "typedoc src", + "prepublishOnly": "tsc", + "pretest": "tsc -p tsconfig.test.json", + "test": "jest \"build/(.+).spec.js\"" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "aws-sdk-js@amazon.com" + }, + "license": "Apache-2.0", + "devDependencies": { + "@aws-sdk/client-dynamodb": "^3.44.0", + "@types/jest": "^27.0.3", + "@types/node": "^16.11.11", + "jest": "^27.4.3", + "prettier": "^2.5.0", + "ts-jest": "^27.0.7", + "typedoc": "^0.22.10", + "typescript": "^4.5.2" + }, + "peerDependencies": 
{ + "@aws-sdk/client-dynamodb": "^3.44.0" + }, + "dependencies": { + "tslib": "^2.3.1", + "utf8-bytes": "^0.0.1" + } } From c26fc2b3a6b7ac0ed21fd2d471bc55e99880c619 Mon Sep 17 00:00:00 2001 From: bas-d <7903735+bas-d@users.noreply.github.com> Date: Fri, 3 Dec 2021 17:11:32 -0600 Subject: [PATCH 04/17] Update for aws-sdk v3. --- src/BatchGet.spec.ts | 423 ++++++++++++++++++++--------------------- src/BatchGet.ts | 60 +++--- src/BatchGetOptions.ts | 14 +- src/BatchOperation.ts | 389 ++++++++++++++++++------------------- src/types.ts | 54 +++--- 5 files changed, 446 insertions(+), 494 deletions(-) diff --git a/src/BatchGet.spec.ts b/src/BatchGet.spec.ts index 6fb4741d..14572da4 100644 --- a/src/BatchGet.spec.ts +++ b/src/BatchGet.spec.ts @@ -1,18 +1,23 @@ import { BatchGet, MAX_READ_BATCH_SIZE } from './BatchGet'; -import {AttributeMap, BatchGetItemInput, BatchGetItemOutput} from 'aws-sdk/clients/dynamodb'; +import { + AttributeValue, + BatchGetItemCommand, + BatchGetItemCommandInput, + BatchGetItemCommandOutput +} from '@aws-sdk/client-dynamodb'; describe('BatchGet', () => { - const promiseFunc = jest.fn(() => Promise.resolve({ - UnprocessedKeys: {} - } as BatchGetItemOutput)); const mockDynamoDbClient = { config: {}, - batchGetItem: jest.fn(() => ({promise: promiseFunc})), - } as any; + send: jest.fn(() => + Promise.resolve({ + UnprocessedKeys: {} + } as BatchGetItemCommandOutput) + ) + }; beforeEach(() => { - promiseFunc.mockClear(); - mockDynamoDbClient.batchGetItem.mockClear(); + mockDynamoDbClient.send.mockClear(); }); it('should return itself when its Symbol.asyncIterator method is called', () => { @@ -21,38 +26,31 @@ describe('BatchGet', () => { }); it('should allow setting an overall read consistency', async () => { - const batchGet = new BatchGet( - mockDynamoDbClient as any, - [['foo', {fizz: {N: '0'}}]], - {ConsistentRead: true} - ); + const batchGet = new BatchGet(mockDynamoDbClient as any, [['foo', { fizz: { N: '0' } }]], { + ConsistentRead: true + }); for await (const _ of batchGet) { console.log(_ === undefined); // pass } - expect(mockDynamoDbClient.batchGetItem.mock.calls).toEqual([ - [ - { - RequestItems: { - foo: { - Keys: [ - {fizz: {N: '0'}} - ], - ConsistentRead: true - } - } + //@ts-ignore + expect(mockDynamoDbClient.send.mock.calls[0][0].input).toEqual({ + RequestItems: { + foo: { + Keys: [{ fizz: { N: '0' } }], + ConsistentRead: true } - ] - ]) + } + }); }); it('should allow setting per-table read consistency', async () => { const batchGet = new BatchGet( mockDynamoDbClient as any, [ - ['foo', {fizz: {N: '0'}}], - ['bar', {quux: {N: '1'}}], + ['foo', { fizz: { N: '0' } }], + ['bar', { quux: { N: '1' } }] ], { ConsistentRead: true, @@ -66,34 +64,27 @@ describe('BatchGet', () => { // pass } - expect(mockDynamoDbClient.batchGetItem.mock.calls).toEqual([ - [ - { - RequestItems: { - foo: { - Keys: [ - {fizz: {N: '0'}} - ], - ConsistentRead: true - }, - bar: { - Keys: [ - {quux: {N: '1'}} - ], - ConsistentRead: false - } - } + //@ts-ignore + expect(mockDynamoDbClient.send.mock.calls[0][0].input).toEqual({ + RequestItems: { + foo: { + Keys: [{ fizz: { N: '0' } }], + ConsistentRead: true + }, + bar: { + Keys: [{ quux: { N: '1' } }], + ConsistentRead: false } - ] - ]); + } + }); }); it('should allow specifying per-table projection expressions', async () => { const batchGet = new BatchGet( mockDynamoDbClient as any, [ - ['foo', {fizz: {N: '0'}}], - ['bar', {quux: {N: '1'}}], + ['foo', { fizz: { N: '0' } }], + ['bar', { quux: { N: '1' } }] ], { PerTableOptions: { 
@@ -108,178 +99,171 @@ describe('BatchGet', () => { // pass } - expect(mockDynamoDbClient.batchGetItem.mock.calls).toEqual([ - [ - { - RequestItems: { - foo: { - Keys: [ - {fizz: {N: '0'}} - ] - }, - bar: { - Keys: [ - {quux: {N: '1'}} - ], - ProjectionExpression: 'snap[1].crackle.pop[2]' - } - } + //@ts-ignore + expect(mockDynamoDbClient.send.mock.calls[0][0].input).toEqual({ + RequestItems: { + foo: { + Keys: [{ fizz: { N: '0' } }] + }, + bar: { + Keys: [{ quux: { N: '1' } }], + ProjectionExpression: 'snap[1].crackle.pop[2]' } - ] - ]); + } + }); }); for (const asyncInput of [true, false]) { - it( - `should should partition get batches into requests with ${MAX_READ_BATCH_SIZE} or fewer items`, - async () => { - const gets: Array<[string, AttributeMap]> = []; - const expected: any = [ - [ - { - RequestItems: { - snap: { Keys: [] }, - crackle: { Keys: [] }, - pop: { Keys: [] }, - } - } - ], - [ - { - RequestItems: { - snap: { Keys: [] }, - crackle: { Keys: [] }, - pop: { Keys: [] }, - } - } - ], - [ - { - RequestItems: { - snap: { Keys: [] }, - crackle: { Keys: [] }, - pop: { Keys: [] }, - } - } - ], - [ - { - RequestItems: { - snap: { Keys: [] }, - crackle: { Keys: [] }, - pop: { Keys: [] }, - } + it(`should should partition get batches into requests with ${MAX_READ_BATCH_SIZE} or fewer items`, async () => { + const gets: Array<[string, Record]> = []; + const expected: any = [ + [ + new BatchGetItemCommand({ + RequestItems: { + snap: { Keys: [] }, + crackle: { Keys: [] }, + pop: { Keys: [] } } - ], - ]; - const responses: any = [ - { - Responses: { - snap: [], - crackle: [], - pop: [], + }) + ], + [ + new BatchGetItemCommand({ + RequestItems: { + snap: { Keys: [] }, + crackle: { Keys: [] }, + pop: { Keys: [] } } - }, - { - Responses: { - snap: [], - crackle: [], - pop: [], + }) + ], + [ + new BatchGetItemCommand({ + RequestItems: { + snap: { Keys: [] }, + crackle: { Keys: [] }, + pop: { Keys: [] } } - }, - { - Responses: { - snap: [], - crackle: [], - pop: [], + }) + ], + [ + new BatchGetItemCommand({ + RequestItems: { + snap: { Keys: [] }, + crackle: { Keys: [] }, + pop: { Keys: [] } } - }, - { - Responses: { - snap: [], - crackle: [], - pop: [], - } - }, - ]; - - for (let i = 0; i < 325; i++) { - const table = i % 3 === 0 - ? 'snap' - : i % 3 === 1 ? 'crackle' : 'pop'; - const fizz = { N: String(i) }; - const buzz = { S: 'Static string' }; - gets.push([table, {fizz: {N: String(i)}}]); - - responses[Math.floor(i / MAX_READ_BATCH_SIZE)] - .Responses[table] - .push({fizz, buzz}); - expected[Math.floor(i / MAX_READ_BATCH_SIZE)][0] - .RequestItems[table].Keys - .push({fizz}); + }) + ] + ]; + const responses: BatchGetItemCommandOutput[] = [ + { + $metadata: {}, + Responses: { + snap: [], + crackle: [], + pop: [] + } + }, + { + $metadata: {}, + Responses: { + snap: [], + crackle: [], + pop: [] + } + }, + { + $metadata: {}, + Responses: { + snap: [], + crackle: [], + pop: [] + } + }, + { + $metadata: {}, + Responses: { + snap: [], + crackle: [], + pop: [] + } } + ]; - for (const response of responses) { - promiseFunc.mockImplementationOnce( - () => Promise.resolve(response) - ); - } + for (let i = 0; i < 325; i++) { + const table = i % 3 === 0 ? 'snap' : i % 3 === 1 ? 
'crackle' : 'pop'; + const fizz = { N: String(i) }; + const buzz = { S: 'Static string' }; + gets.push([table, { fizz: { N: String(i) } }]); + + responses?.[Math.floor(i / MAX_READ_BATCH_SIZE)]?.Responses?.[table].push({ + fizz, + buzz + }); + expected[Math.floor(i / MAX_READ_BATCH_SIZE)][0].input.RequestItems[table].ConsistentRead = undefined; + expected[Math.floor(i / MAX_READ_BATCH_SIZE)][0].input.RequestItems[table].ExpressionAttributeNames = + undefined; + expected[Math.floor(i / MAX_READ_BATCH_SIZE)][0].input.RequestItems[table].ProjectionExpression = + undefined; + expected[Math.floor(i / MAX_READ_BATCH_SIZE)][0].input.RequestItems[table].Keys.push({ fizz }); + } - const input = asyncInput - ? async function *() { - for (const item of gets) { - await new Promise(resolve => setTimeout( - resolve, - Math.round(Math.random()) - )); - yield item; - } - }() - : gets; - - const seen = new Set(); - for await (const [table, item] of new BatchGet(mockDynamoDbClient as any, input)) { - const id = parseInt(item.fizz.N as string); - expect(seen.has(id)).toBe(false); - seen.add(id); - - if (id % 3 === 0) { - expect(table).toBe('snap'); - } else if (id % 3 === 1) { - expect(table).toBe('crackle'); - } else { - expect(table).toBe('pop'); - } + for (const response of responses) { + mockDynamoDbClient.send.mockImplementationOnce(() => Promise.resolve(response)); + } - expect(item.buzz).toEqual({ S: 'Static string' }); - } + const input = asyncInput + ? (async function* () { + for (const item of gets) { + await new Promise((resolve) => setTimeout(resolve, Math.round(Math.random()))); + yield item; + } + })() + : gets; + + const seen = new Set(); + for await (const [table, item] of new BatchGet(mockDynamoDbClient as any, input)) { + const id = parseInt(item.fizz.N as string); + expect(seen.has(id)).toBe(false); + seen.add(id); - expect(seen.size).toBe(gets.length); + if (id % 3 === 0) { + expect(table).toBe('snap'); + } else if (id % 3 === 1) { + expect(table).toBe('crackle'); + } else { + expect(table).toBe('pop'); + } - const {calls} = mockDynamoDbClient.batchGetItem.mock; - expect(calls.length) - .toBe(Math.ceil(gets.length / MAX_READ_BATCH_SIZE)); - expect(calls).toEqual(expected); + expect(item.buzz).toEqual({ S: 'Static string' }); } - ); + + expect(seen.size).toBe(gets.length); + + const { calls } = mockDynamoDbClient.send.mock; + expect(calls.length).toBe(Math.ceil(gets.length / MAX_READ_BATCH_SIZE)); + calls.forEach((call, index) => { + //@ts-ignore + expect(call[0].input).toEqual(expected[index][0].input); + }); + }); it('should should retry unprocessed items', async () => { const failures = new Set(['24', '66', '99', '103', '142', '178', '204', '260', '288']); - const gets: Array<[string, AttributeMap]> = []; + const gets: Array<[string, Record]> = []; for (let i = 0; i < 325; i++) { - const table = i % 3 === 0 - ? 'snap' - : i % 3 === 1 ? 'crackle' : 'pop'; - gets.push([table, {fizz: {N: String(i)}}]); + const table = i % 3 === 0 ? 'snap' : i % 3 === 1 ? 
'crackle' : 'pop'; + gets.push([table, { fizz: { N: String(i) } }]); } const toBeFailed = new Set(failures); - promiseFunc.mockImplementation(() => { + mockDynamoDbClient.send.mockImplementation(async () => { const buzz = { S: 'Static string' }; - const response: BatchGetItemOutput = {}; + const response: BatchGetItemCommandOutput = { + $metadata: {} + }; - const {RequestItems} = (mockDynamoDbClient.batchGetItem.mock.calls.slice(-1)[0] as any)[0]; + const { RequestItems } = (mockDynamoDbClient.send.mock.calls.slice(-1)[0] as any)[0].input; for (const tableName of Object.keys(RequestItems)) { for (const item of RequestItems[tableName].Keys) { if (toBeFailed.has(item.fizz.N)) { @@ -288,10 +272,10 @@ describe('BatchGet', () => { } if (!(tableName in response.UnprocessedKeys)) { - response.UnprocessedKeys[tableName] = {Keys: []}; + response.UnprocessedKeys[tableName] = { Keys: [] }; } - response.UnprocessedKeys[tableName].Keys.push(item); + response.UnprocessedKeys?.[tableName]?.Keys?.push(item); toBeFailed.delete(item.fizz.N); } else { if (!response.Responses) { @@ -304,8 +288,8 @@ describe('BatchGet', () => { response.Responses[tableName].push({ ...item, - buzz, - }) + buzz + }); } } } @@ -314,15 +298,12 @@ describe('BatchGet', () => { }); const input = asyncInput - ? async function *() { - for (const item of gets) { - await new Promise(resolve => setTimeout( - resolve, - Math.round(Math.random()) - )); - yield item; - } - }() + ? (async function* () { + for (const item of gets) { + await new Promise((resolve) => setTimeout(resolve, Math.round(Math.random()))); + yield item; + } + })() : gets; let idsReturned = new Set(); @@ -345,19 +326,25 @@ describe('BatchGet', () => { expect(idsReturned.size).toBe(gets.length); expect(toBeFailed.size).toBe(0); - const {calls} = mockDynamoDbClient.batchGetItem.mock; + const { calls } = mockDynamoDbClient.send.mock; expect(calls.length).toBe(Math.ceil(gets.length / MAX_READ_BATCH_SIZE)); - const callCount: {[key: string]: number} = (calls as Array>).reduce( - ( - keyUseCount: {[key: string]: number}, - [{RequestItems}] - ) => { + const callCount: { [key: string]: number } = (calls as Array>).reduce( + (keyUseCount: { [key: string]: number }, call) => { + //@ts-ignore + const { RequestItems } = call[0].input; const keys = []; - for (const table of Object.keys(RequestItems)) { - keys.push(...RequestItems[table].Keys); + if (RequestItems != null) { + for (const table of Object.keys(RequestItems)) { + const k = RequestItems[table]?.Keys; + if (k != null) { + keys.push(...k); + } + } } - for (const {fizz: {N: key}} of keys) { + for (const { + fizz: { N: key } + } of keys) { if (key) { if (key in keyUseCount) { keyUseCount[key]++; diff --git a/src/BatchGet.ts b/src/BatchGet.ts index f79239c1..283716b4 100644 --- a/src/BatchGet.ts +++ b/src/BatchGet.ts @@ -1,8 +1,12 @@ import { BatchGetOptions, PerTableOptions } from './BatchGetOptions'; import { BatchOperation } from './BatchOperation'; import { SyncOrAsyncIterable, TableState } from './types'; -import { AttributeMap, BatchGetItemInput } from 'aws-sdk/clients/dynamodb'; -import DynamoDB = require('aws-sdk/clients/dynamodb'); +import { + DynamoDBClient, + BatchGetItemCommandInput, + AttributeValue, + BatchGetItemCommand +} from '@aws-sdk/client-dynamodb'; export const MAX_READ_BATCH_SIZE = 100; @@ -14,7 +18,8 @@ export const MAX_READ_BATCH_SIZE = 100; * unprocessed. Exponential backoff on unprocessed items is employed on a * per-table basis. 
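A minimal usage sketch of the class defined just below, driven by the v3 client (the table name and key values are invented for illustration):

import { AttributeValue, DynamoDBClient } from '@aws-sdk/client-dynamodb';
import { BatchGet } from './BatchGet';

const client = new DynamoDBClient({});
const keys: Array<[string, Record<string, AttributeValue>]> = [
    ['MusicTable', { artist: { S: 'No One You Know' } }],
    ['MusicTable', { artist: { S: 'Acme Band' } }]
];

async function printItems(): Promise<void> {
    // Each yielded tuple is [tableName, item]; unprocessed keys are retried
    // automatically with per-table exponential backoff.
    for await (const [table, item] of new BatchGet(client, keys)) {
        console.log(table, item);
    }
}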
*/ -export class BatchGet extends BatchOperation { + +export class BatchGet extends BatchOperation> { protected readonly batchSize = MAX_READ_BATCH_SIZE; private readonly consistentRead?: boolean; @@ -30,12 +35,9 @@ export class BatchGet extends BatchOperation { * @param options Additional options to apply to the operations executed. */ constructor( - client: DynamoDB, - items: SyncOrAsyncIterable<[string, AttributeMap]>, - { - ConsistentRead, - PerTableOptions = {}, - }: BatchGetOptions = {} + client: DynamoDBClient, + items: SyncOrAsyncIterable<[string, Record]>, + { ConsistentRead, PerTableOptions = {} }: BatchGetOptions = {} ) { super(client, items); this.consistentRead = ConsistentRead; @@ -43,41 +45,41 @@ export class BatchGet extends BatchOperation { } protected async doBatchRequest() { - const operationInput: BatchGetItemInput = {RequestItems: {}}; + const operationInput: BatchGetItemCommandInput = { RequestItems: {} }; let batchSize = 0; while (this.toSend.length > 0) { - const [tableName, item] = this.toSend.shift() as [string, AttributeMap]; - if (operationInput.RequestItems[tableName] === undefined) { - const { - projection, - consistentRead, - attributeNames, - } = this.state[tableName]; + const [tableName, item] = this.toSend.shift() as [string, Record]; + if (operationInput.RequestItems === undefined) { + operationInput.RequestItems = {}; + } + + if (operationInput.RequestItems?.[tableName] === undefined) { + const { projection, consistentRead, attributeNames } = this.state[tableName]; operationInput.RequestItems[tableName] = { Keys: [], ConsistentRead: consistentRead, ProjectionExpression: projection, - ExpressionAttributeNames: attributeNames, + ExpressionAttributeNames: attributeNames }; } - operationInput.RequestItems[tableName].Keys.push(item); + operationInput.RequestItems?.[tableName]?.Keys?.push(item); if (++batchSize === this.batchSize) { break; } } - - const { - Responses = {}, - UnprocessedKeys = {}, - } = await this.client.batchGetItem(operationInput).promise(); + const command = new BatchGetItemCommand(operationInput); + const { Responses = {}, UnprocessedKeys = {} } = await this.client.send(command); const unprocessedTables = new Set(); for (const table of Object.keys(UnprocessedKeys)) { - unprocessedTables.add(table); - this.handleThrottled(table, UnprocessedKeys[table].Keys); + const keys = UnprocessedKeys[table].Keys; + if (keys != null) { + unprocessedTables.add(table); + this.handleThrottled(table, keys); + } } this.movePendingToThrottled(unprocessedTables); @@ -91,12 +93,12 @@ export class BatchGet extends BatchOperation { } } - protected getInitialTableState(tableName: string): TableState { + protected getInitialTableState(tableName: string): TableState> { const { ExpressionAttributeNames, ProjectionExpression, - ConsistentRead = this.consistentRead, - } = this.options[tableName] || {} as PerTableOptions; + ConsistentRead = this.consistentRead + } = this.options[tableName] || ({} as PerTableOptions); return { ...super.getInitialTableState(tableName), diff --git a/src/BatchGetOptions.ts b/src/BatchGetOptions.ts index 032a030d..208527e7 100644 --- a/src/BatchGetOptions.ts +++ b/src/BatchGetOptions.ts @@ -1,14 +1,10 @@ -import { - ConsistentRead, - ExpressionAttributeNameMap, - ProjectionExpression, -} from "aws-sdk/clients/dynamodb"; +import {} from '@aws-sdk/client-dynamodb'; export interface BatchGetOptions { /** * The default read consistency to apply to gets. 
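Taken together with the per-table settings declared further down in this file, a typical options object looks like this (the table and attribute names are invented):

import { BatchGetOptions } from './BatchGetOptions';

// Consistent reads everywhere, plus a projection for one specific table.
const options: BatchGetOptions = {
    ConsistentRead: true,
    PerTableOptions: {
        MusicTable: {
            ProjectionExpression: 'artist, #y',
            ExpressionAttributeNames: { '#y': 'year' }
        }
    }
};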
*/ - ConsistentRead?: ConsistentRead; + ConsistentRead?: boolean; /** * Options to apply for all reads directed to a specific table. @@ -24,16 +20,16 @@ export interface TableOptions { /** * The read consistency to apply to reads against this table. */ - ConsistentRead?: ConsistentRead; + ConsistentRead?: boolean; /** * One or more substitution tokens for attribute names in an expression. */ - ExpressionAttributeNames?: ExpressionAttributeNameMap; + ExpressionAttributeNames?: Record; /** * A string that identifies one or more attributes to retrieve from the * table. */ - ProjectionExpression?: ProjectionExpression; + ProjectionExpression?: string; } diff --git a/src/BatchOperation.ts b/src/BatchOperation.ts index c6b828c8..2ff7c022 100644 --- a/src/BatchOperation.ts +++ b/src/BatchOperation.ts @@ -1,248 +1,219 @@ -import { - BatchState, - SyncOrAsyncIterable, - TableState, - TableStateElement, - ThrottledTableConfiguration, -} from "./types"; -import DynamoDB = require("aws-sdk/clients/dynamodb"); +import { BatchState, SyncOrAsyncIterable, TableState, TableStateElement, ThrottledTableConfiguration } from './types'; +import { DynamoDBClient } from '@aws-sdk/client-dynamodb'; if (Symbol && !Symbol.asyncIterator) { - (Symbol as any).asyncIterator = Symbol.for("__@@asyncIterator__"); + (Symbol as any).asyncIterator = Symbol.for('__@@asyncIterator__'); } export abstract class BatchOperation - implements AsyncIterableIterator<[string, Element]> + implements AsyncIterableIterator<[string, Element]> { - /** - * The maximum number of elements that may be included in a single batch. - */ - protected abstract readonly batchSize: number; - - /** - * Items that have been retrieved and are ready to be returned. - */ - protected readonly pending: Array<[string, Element]> = []; - - /** - * A mapping of table names to table-specific operation state (e.g., the - * number of throttling events experienced, etc.) - */ - protected readonly state: BatchState = {}; - - /** - * Input elements that are prepared for immediate dispatch - */ - protected readonly toSend: Array<[string, Element]> = []; - - private readonly throttled = new Set< - Promise> - >(); - private readonly iterator: - | Iterator<[string, Element]> - | AsyncIterator<[string, Element]>; - private sourceDone: boolean = false; - private sourceNext: - | IteratorResult<[string, Element]> - | Promise>; - private lastResolved?: Promise>; - - /** - * @param client The AWS SDK client with which to communicate with - * DynamoDB. - * @param items A synchronous or asynchronous iterable of tuples - * describing the operations to execute. The first member - * of the tuple should be the name of the table targeted by - * the operation. - */ - constructor( - protected readonly client: DynamoDB, - items: SyncOrAsyncIterable<[string, Element]> - ) { - if (isIterable(items)) { - this.iterator = items[Symbol.iterator](); - } else { - this.iterator = items[Symbol.asyncIterator](); - } - this.sourceNext = this.iterator.next(); - } - - next(): Promise> { - if (this.lastResolved) { - this.lastResolved = this.lastResolved.then(() => this.getNext()); - } else { - this.lastResolved = this.getNext(); + /** + * The maximum number of elements that may be included in a single batch. + */ + protected abstract readonly batchSize: number; + + /** + * Items that have been retrieved and are ready to be returned. 
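One detail worth calling out in this class is how `next()` serializes overlapping calls by chaining each one onto `lastResolved`. The trick in isolation, as a generic sketch (names are illustrative):

// Each caller waits for the previous pull to resolve before a new one starts,
// so pages are fetched strictly one at a time even when next() is called concurrently.
let lastResolved: Promise<number> | undefined;
function serializedPull(produce: () => Promise<number>): Promise<number> {
    lastResolved = lastResolved ? lastResolved.then(() => produce()) : produce();
    return lastResolved;
}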
+ */ + protected readonly pending: Array<[string, Element]> = []; + + /** + * A mapping of table names to table-specific operation state (e.g., the + * number of throttling events experienced, etc.) + */ + protected readonly state: BatchState = {}; + + /** + * Input elements that are prepared for immediate dispatch + */ + protected readonly toSend: Array<[string, Element]> = []; + + private readonly throttled = new Set>>(); + private readonly iterator: Iterator<[string, Element]> | AsyncIterator<[string, Element]>; + private sourceDone: boolean = false; + private sourceNext: IteratorResult<[string, Element]> | Promise>; + private lastResolved?: Promise>; + + /** + * @param client The AWS SDK client with which to communicate with + * DynamoDB. + * @param items A synchronous or asynchronous iterable of tuples + * describing the operations to execute. The first member + * of the tuple should be the name of the table targeted by + * the operation. + */ + constructor(protected readonly client: DynamoDBClient, items: SyncOrAsyncIterable<[string, Element]>) { + if (isIterable(items)) { + this.iterator = items[Symbol.iterator](); + } else { + this.iterator = items[Symbol.asyncIterator](); + } + this.sourceNext = this.iterator.next(); } - return this.lastResolved; - } - - [Symbol.asyncIterator]() { - return this; - } - - /** - * Execute a single batch request and process the result. - */ - protected abstract doBatchRequest(): Promise; - - /** - * Create and return the initial state object for a given DynamoDB table. - * - * @param tableName The name of the table whose initial state should be - * returned. - */ - protected getInitialTableState(tableName: string): TableState { - return { - backoffFactor: 0, - name: tableName, - }; - } - - /** - * Accept an array of unprocessed items belonging to a single table and - * re-enqueue it for submission, making sure the appropriate level of - * backoff is applied to future operations on the same table. - * - * @param tableName The table to which the unprocessed elements belong. - * @param unprocessed Elements returned by DynamoDB as not yet processed. - * The elements should not be unmarshalled, but they - * should be reverted to the form used for elements - * that have not yet been sent. - */ - protected handleThrottled( - tableName: string, - unprocessed: Array - ): void { - const tableState = this.state[tableName]; - tableState.backoffFactor++; - - if (tableState.tableThrottling) { - this.throttled.delete(tableState.tableThrottling.backoffWaiter); - unprocessed.unshift(...tableState.tableThrottling.unprocessed); - } + next(): Promise> { + if (this.lastResolved) { + this.lastResolved = this.lastResolved.then(() => this.getNext()); + } else { + this.lastResolved = this.getNext(); + } - tableState.tableThrottling = { - unprocessed, - backoffWaiter: new Promise((resolve) => { - setTimeout( - resolve, - exponentialBackoff(tableState.backoffFactor), - tableState - ); - }), - }; - - this.throttled.add(tableState.tableThrottling.backoffWaiter); - } - - /** - * Iterate over all pending writes and move those targeting throttled tables - * into the throttled queue. - * - * @param unprocessedTables A set of tables for which some items were - * returned without being processed. 
- */ - protected movePendingToThrottled(unprocessedTables: Set) { - for (let i = this.toSend.length - 1; i > -1; i--) { - const [table, attributes] = this.toSend[i]; - if (unprocessedTables.has(table)) { - ( - this.state[table] as ThrottledTableConfiguration - ).tableThrottling?.unprocessed.push(attributes); - this.toSend.splice(i, 1); - } + return this.lastResolved; } - } - private addToSendQueue([tableName, attributes]: [string, Element]): void { - if (!this.state[tableName]) { - this.state[tableName] = this.getInitialTableState(tableName); + [Symbol.asyncIterator]() { + return this; } - const tableState = this.state[tableName]; - if (tableState.tableThrottling) { - tableState.tableThrottling.unprocessed.push(attributes); - } else { - this.toSend.push([tableName, attributes]); + /** + * Execute a single batch request and process the result. + */ + protected abstract doBatchRequest(): Promise; + + /** + * Create and return the initial state object for a given DynamoDB table. + * + * @param tableName The name of the table whose initial state should be + * returned. + */ + protected getInitialTableState(tableName: string): TableState { + return { + backoffFactor: 0, + name: tableName + }; } - } - private enqueueThrottled(table: ThrottledTableConfiguration): void { - if (table.tableThrottling == null) { - return; - } - const { - tableThrottling: { backoffWaiter, unprocessed }, - } = table; - if (unprocessed.length > 0) { - this.toSend.push( - ...unprocessed.map((attr) => [table.name, attr] as [string, Element]) - ); - } + /** + * Accept an array of unprocessed items belonging to a single table and + * re-enqueue it for submission, making sure the appropriate level of + * backoff is applied to future operations on the same table. + * + * @param tableName The table to which the unprocessed elements belong. + * @param unprocessed Elements returned by DynamoDB as not yet processed. + * The elements should not be unmarshalled, but they + * should be reverted to the form used for elements + * that have not yet been sent. + */ + protected handleThrottled(tableName: string, unprocessed: Array): void { + const tableState = this.state[tableName]; + tableState.backoffFactor++; + + if (tableState.tableThrottling) { + this.throttled.delete(tableState.tableThrottling.backoffWaiter); + unprocessed.unshift(...tableState.tableThrottling.unprocessed); + } + + tableState.tableThrottling = { + unprocessed, + backoffWaiter: new Promise((resolve) => { + setTimeout(resolve, exponentialBackoff(tableState.backoffFactor), tableState); + }) + }; - this.throttled.delete(backoffWaiter); - delete table.tableThrottling; - } - - private async getNext(): Promise> { - if ( - this.sourceDone && - this.pending.length === 0 && - this.toSend.length === 0 && - this.throttled.size === 0 - ) { - return { done: true } as IteratorResult<[string, Element]>; + this.throttled.add(tableState.tableThrottling.backoffWaiter); } - if (this.pending.length > 0) { - return { - done: false, - value: this.pending.shift() as [string, Element], - }; + /** + * Iterate over all pending writes and move those targeting throttled tables + * into the throttled queue. + * + * @param unprocessedTables A set of tables for which some items were + * returned without being processed. 
+ */ + protected movePendingToThrottled(unprocessedTables: Set) { + for (let i = this.toSend.length - 1; i > -1; i--) { + const [table, attributes] = this.toSend[i]; + if (unprocessedTables.has(table)) { + (this.state[table] as ThrottledTableConfiguration).tableThrottling?.unprocessed.push( + attributes + ); + this.toSend.splice(i, 1); + } + } } - await this.refillPending(); - return this.getNext(); - } + private addToSendQueue([tableName, attributes]: [string, Element]): void { + if (!this.state[tableName]) { + this.state[tableName] = this.getInitialTableState(tableName); + } + const tableState = this.state[tableName]; - private async refillPending() { - while (!this.sourceDone && this.toSend.length < this.batchSize) { - const toProcess = isIteratorResult(this.sourceNext) - ? this.sourceNext - : await Promise.race([this.sourceNext, Promise.race(this.throttled)]); + if (tableState.tableThrottling) { + tableState.tableThrottling.unprocessed.push(attributes); + } else { + this.toSend.push([tableName, attributes]); + } + } - if (isIteratorResult(toProcess)) { - if (toProcess.done) { - this.sourceDone = true; + private enqueueThrottled(table: ThrottledTableConfiguration): void { + if (table.tableThrottling == null) { + return; } - if (!this.sourceDone) { - this.addToSendQueue(toProcess.value); - this.sourceNext = this.iterator.next(); + const { + tableThrottling: { backoffWaiter, unprocessed } + } = table; + if (unprocessed.length > 0) { + this.toSend.push(...unprocessed.map((attr) => [table.name, attr] as [string, Element])); } - } else { - this.enqueueThrottled(toProcess); - } + + this.throttled.delete(backoffWaiter); + delete table.tableThrottling; } - while (this.toSend.length < this.batchSize && this.throttled.size > 0) { - this.enqueueThrottled(await Promise.race(this.throttled)); + private async getNext(): Promise> { + if (this.sourceDone && this.pending.length === 0 && this.toSend.length === 0 && this.throttled.size === 0) { + return { done: true } as IteratorResult<[string, Element]>; + } + + if (this.pending.length > 0) { + return { + done: false, + value: this.pending.shift() as [string, Element] + }; + } + + await this.refillPending(); + return this.getNext(); } - if (this.toSend.length > 0) { - await this.doBatchRequest(); + private async refillPending() { + while (!this.sourceDone && this.toSend.length < this.batchSize) { + const toProcess = isIteratorResult(this.sourceNext) + ? 
this.sourceNext
+                      : await Promise.race([this.sourceNext, Promise.race(this.throttled)]);
+
+            if (isIteratorResult(toProcess)) {
+                if (toProcess.done) {
+                    this.sourceDone = true;
+                }
+                if (!this.sourceDone) {
+                    this.addToSendQueue(toProcess.value);
+                    this.sourceNext = this.iterator.next();
+                }
+            } else {
+                this.enqueueThrottled(toProcess);
+            }
+        }
+
+        while (this.toSend.length < this.batchSize && this.throttled.size > 0) {
+            this.enqueueThrottled(await Promise.race(this.throttled));
+        }
+
+        if (this.toSend.length > 0) {
+            await this.doBatchRequest();
+        }
     }
 }
 
 function exponentialBackoff(attempts: number) {
-  return Math.floor(Math.random() * Math.pow(2, attempts));
+    return Math.floor(Math.random() * Math.pow(2, attempts));
 }
 
 function isIterable<T>(arg: any): arg is Iterable<T> {
-  return Boolean(arg) && typeof arg[Symbol.iterator] === "function";
+    return Boolean(arg) && typeof arg[Symbol.iterator] === 'function';
 }
 
 function isIteratorResult<T>(arg: any): arg is IteratorResult<T> {
-  return Boolean(arg) && typeof arg.done === "boolean";
+    return Boolean(arg) && typeof arg.done === 'boolean';
 }
diff --git a/src/types.ts b/src/types.ts
index 04d83093..56290a34 100644
--- a/src/types.ts
+++ b/src/types.ts
@@ -1,12 +1,9 @@
 import {
-  AttributeMap,
-  ConsistentRead,
-  DeleteRequest,
-  ExpressionAttributeNameMap,
-  ProjectionExpression,
-  PutRequest,
-  WriteRequest as DynamoDbWriteRequest,
-} from "aws-sdk/clients/dynamodb";
+    AttributeValue,
+    DeleteRequest,
+    PutRequest,
+    WriteRequest as DynamoDbWriteRequest
+} from '@aws-sdk/client-dynamodb';
 
 /**
  * A synchronous or asynchronous iterable.
@@ -17,40 +14,39 @@ export type SyncOrAsyncIterable<T> = Iterable<T> | AsyncIterable<T>;
  * @internal
  */
 export interface BatchState<Element extends TableStateElement> {
-  [tableName: string]: TableState<Element>;
+    [tableName: string]: TableState<Element>;
 }
 
 /**
  * @internal
  */
 export interface TableState<Element extends TableStateElement> {
-  attributeNames?: ExpressionAttributeNameMap;
-  backoffFactor: number;
-  consistentRead?: ConsistentRead;
-  name: string;
-  projection?: ProjectionExpression;
-  tableThrottling?: TableThrottlingTracker<Element>;
+    attributeNames?: Record<string, string>;
+    backoffFactor: number;
+    consistentRead?: boolean;
+    name: string;
+    projection?: string;
+    tableThrottling?: TableThrottlingTracker<Element>;
 }
 
 /**
  * @internal
  */
-export type TableStateElement = AttributeMap | WriteRequest;
+export type TableStateElement = Record<string, AttributeValue> | WriteRequest;
 
 /**
  * @internal
  */
 export interface TableThrottlingTracker<Element extends TableStateElement> {
-  backoffWaiter: Promise<ThrottledTableConfiguration<Element>>;
-  unprocessed: Array<Element>;
+    backoffWaiter: Promise<ThrottledTableConfiguration<Element>>;
+    unprocessed: Array<Element>;
 }
 
 /**
  * @internal
  */
-export interface ThrottledTableConfiguration<Element extends TableStateElement>
-  extends TableState<Element> {
-  tableThrottling?: TableThrottlingTracker<Element>;
+export interface ThrottledTableConfiguration<Element extends TableStateElement> extends TableState<Element> {
+    tableThrottling?: TableThrottlingTracker<Element>;
 }
 
 /**
@@ -58,11 +54,11 @@ export interface ThrottledTableConfiguration<Element extends TableStateElement>
  * properties has been defined.
*/ export type WriteRequest = - | (DynamoDbWriteRequest & { - PutRequest: PutRequest; - DeleteRequest?: undefined; - }) - | (DynamoDbWriteRequest & { - DeleteRequest: DeleteRequest; - PutRequest?: undefined; - }); + | (DynamoDbWriteRequest & { + PutRequest: PutRequest; + DeleteRequest?: undefined; + }) + | (DynamoDbWriteRequest & { + DeleteRequest: DeleteRequest; + PutRequest?: undefined; + }); From 9f41a13ecfbcf29fecef96c423834615b7015298 Mon Sep 17 00:00:00 2001 From: bas-d <7903735+bas-d@users.noreply.github.com> Date: Fri, 3 Dec 2021 17:12:16 -0600 Subject: [PATCH 05/17] Update itemIdentifier tests --- src/itemIdentifier.spec.ts | 96 ++++++++++++++++---------------------- src/itemIdentifier.ts | 39 ++++++---------- 2 files changed, 56 insertions(+), 79 deletions(-) diff --git a/src/itemIdentifier.spec.ts b/src/itemIdentifier.spec.ts index 67cf7841..679b42ca 100644 --- a/src/itemIdentifier.spec.ts +++ b/src/itemIdentifier.spec.ts @@ -2,76 +2,62 @@ import { itemIdentifier } from './itemIdentifier'; describe('itemIdentifier', () => { it('should serialize all top-level string attributes', () => { - expect( - itemIdentifier('table', {DeleteRequest: {Key: {foo: {S: 'bar'}}}}) - ).toBe('table::delete::foo=bar'); + expect(itemIdentifier('table', { DeleteRequest: { Key: { foo: { S: 'bar' } } } })).toBe( + 'table::delete::foo=bar' + ); - expect( - itemIdentifier('table', {PutRequest: {Item: {foo: {S: 'bar'}}}}) - ).toBe('table::put::foo=bar'); + expect(itemIdentifier('table', { PutRequest: { Item: { foo: { S: 'bar' } } } })).toBe('table::put::foo=bar'); }); it('should serialize all top-level number attributes', () => { - expect( - itemIdentifier('table', {DeleteRequest: {Key: {foo: {N: '1'}}}}) - ).toBe('table::delete::foo=1'); + expect(itemIdentifier('table', { DeleteRequest: { Key: { foo: { N: '1' } } } })).toBe('table::delete::foo=1'); - expect( - itemIdentifier('table', {PutRequest: {Item: {foo: {N: '1'}}}}) - ).toBe('table::put::foo=1'); + expect(itemIdentifier('table', { PutRequest: { Item: { foo: { N: '1' } } } })).toBe('table::put::foo=1'); }); it('should serialize all top-level binary attributes', () => { - expect( - itemIdentifier('table', {DeleteRequest: {Key: {foo: {B: Uint8Array.from([0xde, 0xad])}}}}) - ).toBe('table::delete::foo=222,173'); + expect(itemIdentifier('table', { DeleteRequest: { Key: { foo: { B: Uint8Array.from([0xde, 0xad]) } } } })).toBe( + 'table::delete::foo=222,173' + ); - expect( - itemIdentifier('table', {PutRequest: {Item: {foo: {B: Uint8Array.from([0xde, 0xad])}}}}) - ).toBe('table::put::foo=222,173'); + expect(itemIdentifier('table', { PutRequest: { Item: { foo: { B: Uint8Array.from([0xde, 0xad]) } } } })).toBe( + 'table::put::foo=222,173' + ); }); - it( - 'should serialize different representations of the same binary data in the same way', - () => { - expect( - itemIdentifier( - 'table', - {DeleteRequest: {Key: {foo: {B: '🐎👱❤'}}}} - ) - ).toBe( - itemIdentifier( - 'table', - {DeleteRequest: {Key: {foo: {B: Uint8Array.from([240, 159, 144, 142, 240, 159, 145, 177, 226, 157, 164])}}}} - ) - ); + it('should serialize different representations of the same binary data in the same way', () => { + //@ts-ignore + expect(itemIdentifier('table', { DeleteRequest: { Key: { foo: { B: '🐎👱❤' } } } })).toBe( + itemIdentifier('table', { + DeleteRequest: { + Key: { foo: { B: Uint8Array.from([240, 159, 144, 142, 240, 159, 145, 177, 226, 157, 164]) } } + } + }) + ); - expect( - itemIdentifier( - 'table', - {DeleteRequest: {Key: {foo: {B: '🐎👱❤'}}}} - ) - ).toBe( - 
itemIdentifier( - 'table', - {DeleteRequest: {Key: {foo: {B: Uint8Array.from([240, 159, 144, 142, 240, 159, 145, 177, 226, 157, 164]).buffer}}}} - ) - ); - } - ); + expect( + itemIdentifier( + 'table', + //@ts-ignore + { DeleteRequest: { Key: { foo: { B: '🐎👱❤' } } } } + ) + ).toBe( + itemIdentifier('table', { + DeleteRequest: { + Key: { foo: { B: Uint8Array.from([240, 159, 144, 142, 240, 159, 145, 177, 226, 157, 164]) } } + } + }) + ); + }); it('should throw when an invalid binary value is provided', () => { expect( - () => itemIdentifier('table', {PutRequest: {Item: {foo: {B: []}}}}) + //@ts-ignore + () => itemIdentifier('table', { PutRequest: { Item: { foo: { B: [] } } } }) ).toThrow(); }); - it( - 'should throw when neither a PutRequest nor a DeleteRequest is provided', - () => { - expect( - () => itemIdentifier('table', {} as any) - ).toThrow(); - } - ); -}); \ No newline at end of file + it('should throw when neither a PutRequest nor a DeleteRequest is provided', () => { + expect(() => itemIdentifier('table', {} as any)).toThrow(); + }); +}); diff --git a/src/itemIdentifier.ts b/src/itemIdentifier.ts index e703a656..db38faa8 100644 --- a/src/itemIdentifier.ts +++ b/src/itemIdentifier.ts @@ -1,28 +1,21 @@ import { WriteRequest } from './types'; -import { AttributeMap, BinaryAttributeValue } from 'aws-sdk/clients/dynamodb'; +import { AttributeValue } from '@aws-sdk/client-dynamodb'; const bytes = require('utf8-bytes'); /** * @internal */ -export function itemIdentifier( - tableName: string, - {DeleteRequest, PutRequest}: WriteRequest -): string { - if (DeleteRequest) { - return `${tableName}::delete::${ - serializeKeyTypeAttributes(DeleteRequest.Key) - }`; - } else if (PutRequest) { - return `${tableName}::put::${ - serializeKeyTypeAttributes(PutRequest.Item) - }`; +export function itemIdentifier(tableName: string, { DeleteRequest, PutRequest }: WriteRequest): string { + if (DeleteRequest?.Key != null) { + return `${tableName}::delete::${serializeKeyTypeAttributes(DeleteRequest.Key)}`; + } else if (PutRequest?.Item != null) { + return `${tableName}::put::${serializeKeyTypeAttributes(PutRequest.Item)}`; } - + throw new Error(`Invalid write request provided`); } -function serializeKeyTypeAttributes(attributes: AttributeMap): string { +function serializeKeyTypeAttributes(attributes: Record): string { const keyTypeProperties: Array = []; for (const property of Object.keys(attributes).sort()) { const attribute = attributes[property]; @@ -38,13 +31,9 @@ function serializeKeyTypeAttributes(attributes: AttributeMap): string { return keyTypeProperties.join('&'); } -function toByteArray(value: BinaryAttributeValue): Uint8Array { +function toByteArray(value: any): Uint8Array { if (ArrayBuffer.isView(value)) { - return new Uint8Array( - value.buffer, - value.byteOffset, - value.byteLength - ); + return new Uint8Array(value.buffer, value.byteOffset, value.byteLength); } if (typeof value === 'string') { @@ -59,6 +48,8 @@ function toByteArray(value: BinaryAttributeValue): Uint8Array { } function isArrayBuffer(arg: any): arg is ArrayBuffer { - return (typeof ArrayBuffer === 'function' && arg instanceof ArrayBuffer) || - Object.prototype.toString.call(arg) === '[object ArrayBuffer]'; -} \ No newline at end of file + return ( + (typeof ArrayBuffer === 'function' && arg instanceof ArrayBuffer) || + Object.prototype.toString.call(arg) === '[object ArrayBuffer]' + ); +} From 931831f656d8c027599b48524fad3734775d1b87 Mon Sep 17 00:00:00 2001 From: bas-d <7903735+bas-d@users.noreply.github.com> 
Date: Fri, 3 Dec 2021 17:32:14 -0600 Subject: [PATCH 06/17] Fix unit tests --- src/BatchGet.spec.ts | 15 +-- src/BatchWrite.spec.ts | 268 ++++++++++++++++++++--------------------- src/BatchWrite.ts | 29 ++--- src/itemIdentifier.ts | 3 +- src/types.ts | 33 +++-- 5 files changed, 157 insertions(+), 191 deletions(-) diff --git a/src/BatchGet.spec.ts b/src/BatchGet.spec.ts index 14572da4..681166a2 100644 --- a/src/BatchGet.spec.ts +++ b/src/BatchGet.spec.ts @@ -1,10 +1,5 @@ import { BatchGet, MAX_READ_BATCH_SIZE } from './BatchGet'; -import { - AttributeValue, - BatchGetItemCommand, - BatchGetItemCommandInput, - BatchGetItemCommandOutput -} from '@aws-sdk/client-dynamodb'; +import { AttributeValue, BatchGetItemCommand, BatchGetItemCommandOutput } from '@aws-sdk/client-dynamodb'; describe('BatchGet', () => { const mockDynamoDbClient = { @@ -199,11 +194,6 @@ describe('BatchGet', () => { fizz, buzz }); - expected[Math.floor(i / MAX_READ_BATCH_SIZE)][0].input.RequestItems[table].ConsistentRead = undefined; - expected[Math.floor(i / MAX_READ_BATCH_SIZE)][0].input.RequestItems[table].ExpressionAttributeNames = - undefined; - expected[Math.floor(i / MAX_READ_BATCH_SIZE)][0].input.RequestItems[table].ProjectionExpression = - undefined; expected[Math.floor(i / MAX_READ_BATCH_SIZE)][0].input.RequestItems[table].Keys.push({ fizz }); } @@ -329,9 +319,8 @@ describe('BatchGet', () => { const { calls } = mockDynamoDbClient.send.mock; expect(calls.length).toBe(Math.ceil(gets.length / MAX_READ_BATCH_SIZE)); - const callCount: { [key: string]: number } = (calls as Array>).reduce( + const callCount: { [key: string]: number } = (calls as Array>).reduce( (keyUseCount: { [key: string]: number }, call) => { - //@ts-ignore const { RequestItems } = call[0].input; const keys = []; if (RequestItems != null) { diff --git a/src/BatchWrite.spec.ts b/src/BatchWrite.spec.ts index a4f09ee3..5d1bc10e 100644 --- a/src/BatchWrite.spec.ts +++ b/src/BatchWrite.spec.ts @@ -1,19 +1,19 @@ import { BatchWrite, MAX_WRITE_BATCH_SIZE } from './BatchWrite'; -import { WriteRequest } from './types'; -import {BatchWriteItemInput, BatchWriteItemOutput} from 'aws-sdk/clients/dynamodb'; +// import { WriteRequest } from './types'; +import { BatchWriteItemCommand, BatchWriteItemCommandOutput, WriteRequest } from '@aws-sdk/client-dynamodb'; describe('BatchWrite', () => { - const promiseFunc = jest.fn(() => Promise.resolve({ - UnprocessedItems: {} - } as BatchWriteItemOutput)); const mockDynamoDbClient = { config: {}, - batchWriteItem: jest.fn(() => ({promise: promiseFunc})), + send: jest.fn(() => + Promise.resolve({ + UnprocessedItems: {} + } as BatchWriteItemCommandOutput) + ) }; beforeEach(() => { - promiseFunc.mockClear(); - mockDynamoDbClient.batchWriteItem.mockClear(); + mockDynamoDbClient.send.mockClear(); }); it('should return itself when its Symbol.asyncIterator method is called', () => { @@ -22,95 +22,86 @@ describe('BatchWrite', () => { }); for (const asyncInput of [true, false]) { - it( - `should should partition write batches into requests with ${MAX_WRITE_BATCH_SIZE} or fewer items`, - async () => { - const writes: Array<[string, WriteRequest]> = []; - const expected: any = [ - [ - { - RequestItems: { - snap: [], - crackle: [], - pop: [], - } + it(`should should partition write batches into requests with ${MAX_WRITE_BATCH_SIZE} or fewer items`, async () => { + const writes: Array<[string, WriteRequest]> = []; + const expected: any = [ + [ + new BatchWriteItemCommand({ + RequestItems: { + snap: [], + crackle: [], + pop: [] } - ], - [ - 
{ - RequestItems: { - snap: [], - crackle: [], - pop: [], - } + }) + ], + [ + new BatchWriteItemCommand({ + RequestItems: { + snap: [], + crackle: [], + pop: [] } - ], - [ - { - RequestItems: { - snap: [], - crackle: [], - pop: [], - } + }) + ], + [ + new BatchWriteItemCommand({ + RequestItems: { + snap: [], + crackle: [], + pop: [] } - ], - [ - { - RequestItems: { - snap: [], - crackle: [], - pop: [], - } + }) + ], + [ + new BatchWriteItemCommand({ + RequestItems: { + snap: [], + crackle: [], + pop: [] } - ], - ]; - - for (let i = 0; i < 80; i++) { - const table = i % 3 === 0 - ? 'snap' - : i % 3 === 1 ? 'crackle' : 'pop'; - const fizz = { N: String(i) }; - const req: WriteRequest = i % 2 === 0 - ? {DeleteRequest: {Key: {fizz}}} - : {PutRequest: {Item: {fizz}}}; - writes.push([table, req]); - expected[Math.floor(i / MAX_WRITE_BATCH_SIZE)][0] - .RequestItems[table] - .push(req); - } + }) + ] + ]; - const input = asyncInput - ? async function *() { - for (const item of writes) { - await new Promise(resolve => setTimeout( - resolve, - Math.round(Math.random()) - )); - yield item; - } - }() - : writes; - - for await (const [tableName, req] of new BatchWrite(mockDynamoDbClient as any, input)) { - const id = req.DeleteRequest - ? parseInt(req.DeleteRequest.Key.fizz.N as string) - : parseInt((req.PutRequest as any).Item.fizz.N as string); - - if (id % 3 === 0) { - expect(tableName).toBe('snap'); - } else if (id % 3 === 1) { - expect(tableName).toBe('crackle'); - } else { - expect(tableName).toBe('pop'); - } - } + for (let i = 0; i < 80; i++) { + const table = i % 3 === 0 ? 'snap' : i % 3 === 1 ? 'crackle' : 'pop'; + const fizz = { N: String(i) }; + const req: WriteRequest = + i % 2 === 0 ? { DeleteRequest: { Key: { fizz } } } : { PutRequest: { Item: { fizz } } }; + writes.push([table, req]); + expected[Math.floor(i / MAX_WRITE_BATCH_SIZE)][0].input.RequestItems[table].push(req); + } + + const input = asyncInput + ? (async function* () { + for (const item of writes) { + await new Promise((resolve) => setTimeout(resolve, Math.round(Math.random()))); + yield item; + } + })() + : writes; + + for await (const [tableName, req] of new BatchWrite(mockDynamoDbClient as any, input)) { + const id = req.DeleteRequest?.Key + ? parseInt(req.DeleteRequest.Key.fizz.N as string) + : parseInt((req.PutRequest as any).Item.fizz.N as string); - const {calls} = mockDynamoDbClient.batchWriteItem.mock; - expect(calls.length) - .toBe(Math.ceil(writes.length / MAX_WRITE_BATCH_SIZE)); - expect(calls).toEqual(expected); + if (id % 3 === 0) { + expect(tableName).toBe('snap'); + } else if (id % 3 === 1) { + expect(tableName).toBe('crackle'); + } else { + expect(tableName).toBe('pop'); + } } - ); + + const { calls } = mockDynamoDbClient.send.mock; + expect(calls.length).toBe(Math.ceil(writes.length / MAX_WRITE_BATCH_SIZE)); + calls.forEach((call, index) => { + //@ts-ignore + expect(call[0].input).toEqual(expected[index][0].input); + }); + }); it('should should retry unprocessed items', async () => { const failures = new Set(['21', '24', '38', '43', '55', '60']); @@ -118,19 +109,23 @@ describe('BatchWrite', () => { const unprocessed = new Map(); for (let i = 0; i < 80; i++) { - const table = i % 3 === 0 - ? 'snap' - : i % 3 === 1 ? 'crackle' : 'pop'; + const table = i % 3 === 0 ? 'snap' : i % 3 === 1 ? 'crackle' : 'pop'; const fizz = { N: String(i) }; - const req: WriteRequest = i % 2 === 0 - ? 
{DeleteRequest: {Key: {fizz}}} - : {PutRequest: {Item: { - fizz, - buzz: {B: new ArrayBuffer(3)}, - pop: {B: Uint8Array.from([i])}, - foo: {B: String.fromCharCode(i + 32)}, - quux: {S: 'string'} - }}}; + //@ts-ignore + const req: WriteRequest = + i % 2 === 0 + ? { DeleteRequest: { Key: { fizz } } } + : { + PutRequest: { + Item: { + fizz, + buzz: { B: new ArrayBuffer(3) }, + pop: { B: Uint8Array.from([i]) }, + foo: { B: String.fromCharCode(i + 32) }, + quux: { S: 'string' } + } + } + }; writes.push([table, req]); if (failures.has(fizz.N)) { @@ -138,12 +133,14 @@ describe('BatchWrite', () => { } } - promiseFunc.mockImplementation(async () => { - const response: BatchWriteItemOutput = {}; + mockDynamoDbClient.send.mockImplementation(async () => { + const response: BatchWriteItemCommandOutput = { + $metadata: {} + }; - const {RequestItems} = (mockDynamoDbClient.batchWriteItem.mock.calls.slice(-1)[0] as any)[0]; + const { RequestItems } = (mockDynamoDbClient.send.mock.calls.slice(-1)[0] as any)[0].input; for (const tableName of Object.keys(RequestItems)) { - for (const {DeleteRequest, PutRequest} of RequestItems[tableName]) { + for (const { DeleteRequest, PutRequest } of RequestItems[tableName]) { const item = DeleteRequest ? DeleteRequest.Key : PutRequest.Item; if (unprocessed.has(item.fizz.N)) { if (!response.UnprocessedItems) { @@ -154,9 +151,7 @@ describe('BatchWrite', () => { response.UnprocessedItems[tableName] = []; } - response.UnprocessedItems[tableName].push( - unprocessed.get(item.fizz.N) as object - ); + response.UnprocessedItems[tableName].push(unprocessed.get(item.fizz.N) as object); unprocessed.delete(item.fizz.N); } } @@ -166,21 +161,18 @@ describe('BatchWrite', () => { }); const input = asyncInput - ? async function *() { - for (const item of writes) { - await new Promise(resolve => setTimeout( - resolve, - Math.round(Math.random()) - )); - yield item; - } - }() + ? (async function* () { + for (const item of writes) { + await new Promise((resolve) => setTimeout(resolve, Math.round(Math.random()))); + yield item; + } + })() : writes; const seen = new Set(); for await (const [tableName, req] of new BatchWrite(mockDynamoDbClient as any, input)) { const id = req.DeleteRequest - ? parseInt(req.DeleteRequest.Key.fizz.N as string) + ? parseInt(req.DeleteRequest?.Key?.fizz.N as string) : parseInt((req.PutRequest as any).Item.fizz.N as string); expect(seen.has(id)).toBe(false); @@ -197,28 +189,26 @@ describe('BatchWrite', () => { expect(seen.size).toBe(writes.length); - const {calls} = mockDynamoDbClient.batchWriteItem.mock; - expect(calls.length) - .toBe(Math.ceil(writes.length / MAX_WRITE_BATCH_SIZE)); - - const callCount: {[key: string]: number} = (calls as Array>).reduce( - ( - keyUseCount: {[key: string]: number}, - [{RequestItems}] - ) => { - for (const table of Object.keys(RequestItems)) { - for (const {PutRequest, DeleteRequest} of RequestItems[table]) { - let key = DeleteRequest - ? 
DeleteRequest.Key.fizz.N - : (PutRequest as any).Item.fizz.N; - if (key in keyUseCount) { - keyUseCount[key]++; - } else { - keyUseCount[key] = 1; - } + const { calls } = mockDynamoDbClient.send.mock; + expect(calls.length).toBe(Math.ceil(writes.length / MAX_WRITE_BATCH_SIZE)); + + const callCount: { [key: string]: number } = (calls as Array>).reduce( + (keyUseCount: { [key: string]: number }, call) => { + const { RequestItems } = call[0].input; + if (RequestItems != null) { + for (const table of Object.keys(RequestItems)) { + RequestItems[table].forEach(({ PutRequest, DeleteRequest }) => { + let key = DeleteRequest?.Key + ? DeleteRequest.Key.fizz.N + : (PutRequest as any).Item.fizz.N; + if (key in keyUseCount) { + keyUseCount[key]++; + } else { + keyUseCount[key] = 1; + } + }); } } - return keyUseCount; }, {} diff --git a/src/BatchWrite.ts b/src/BatchWrite.ts index 2d0033a2..fcc23e1c 100644 --- a/src/BatchWrite.ts +++ b/src/BatchWrite.ts @@ -1,7 +1,6 @@ import { BatchOperation } from './BatchOperation'; import { itemIdentifier } from './itemIdentifier'; -import { WriteRequest } from './types'; -import { BatchWriteItemInput } from 'aws-sdk/clients/dynamodb'; +import { BatchWriteItemCommand, BatchWriteItemInput, WriteRequest } from '@aws-sdk/client-dynamodb'; export const MAX_WRITE_BATCH_SIZE = 25; @@ -23,18 +22,18 @@ export class BatchWrite extends BatchOperation { protected async doBatchRequest() { const inFlight: Array<[string, WriteRequest]> = []; - const operationInput: BatchWriteItemInput = {RequestItems: {}}; + const operationInput: BatchWriteItemInput = { RequestItems: {} }; let batchSize = 0; while (this.toSend.length > 0) { - const [ - tableName, - marshalled - ] = this.toSend.shift() as [string, WriteRequest]; + const [tableName, marshalled] = this.toSend.shift() as [string, WriteRequest]; inFlight.push([tableName, marshalled]); - if (operationInput.RequestItems[tableName] === undefined) { + if (operationInput?.RequestItems === undefined) { + operationInput.RequestItems = {}; + } + if (operationInput?.RequestItems?.[tableName] === undefined) { operationInput.RequestItems[tableName] = []; } operationInput.RequestItems[tableName].push(marshalled); @@ -43,10 +42,8 @@ export class BatchWrite extends BatchOperation { break; } } - - const { - UnprocessedItems = {} - } = await this.client.batchWriteItem(operationInput).promise(); + const command = new BatchWriteItemCommand(operationInput); + const { UnprocessedItems = {} } = await this.client.send(command); const unprocessedTables = new Set(); for (const table of Object.keys(UnprocessedItems)) { @@ -59,10 +56,7 @@ export class BatchWrite extends BatchOperation { const identifier = itemIdentifier(table, item as WriteRequest); for (let i = inFlight.length - 1; i >= 0; i--) { const [tableName, attributes] = inFlight[i]; - if ( - tableName === table && - itemIdentifier(tableName, attributes) === identifier - ) { + if (tableName === table && itemIdentifier(tableName, attributes) === identifier) { inFlight.splice(i, 1); } } @@ -81,8 +75,7 @@ export class BatchWrite extends BatchOperation { } for (const tableName of processedTables) { - this.state[tableName].backoffFactor = - Math.max(0, this.state[tableName].backoffFactor - 1); + this.state[tableName].backoffFactor = Math.max(0, this.state[tableName].backoffFactor - 1); } } } diff --git a/src/itemIdentifier.ts b/src/itemIdentifier.ts index db38faa8..c98631b0 100644 --- a/src/itemIdentifier.ts +++ b/src/itemIdentifier.ts @@ -1,5 +1,4 @@ -import { WriteRequest } from './types'; -import { 
AttributeValue } from '@aws-sdk/client-dynamodb'; +import { AttributeValue, WriteRequest } from '@aws-sdk/client-dynamodb'; const bytes = require('utf8-bytes'); /** diff --git a/src/types.ts b/src/types.ts index 56290a34..fc52e861 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,9 +1,4 @@ -import { - AttributeValue, - DeleteRequest, - PutRequest, - WriteRequest as DynamoDbWriteRequest -} from '@aws-sdk/client-dynamodb'; +import { AttributeValue, WriteRequest } from '@aws-sdk/client-dynamodb'; /** * A synchronous or asynchronous iterable. @@ -49,16 +44,16 @@ export interface ThrottledTableConfiguration tableThrottling?: TableThrottlingTracker; } -/** - * A write request for which exactly one of the `PutRequest` and `DeleteRequest` - * properties has been defined. - */ -export type WriteRequest = - | (DynamoDbWriteRequest & { - PutRequest: PutRequest; - DeleteRequest?: undefined; - }) - | (DynamoDbWriteRequest & { - DeleteRequest: DeleteRequest; - PutRequest?: undefined; - }); +// /** +// * A write request for which exactly one of the `PutRequest` and `DeleteRequest` +// * properties has been defined. +// */ +// export type WriteRequest = +// | (DynamoDbWriteRequest & { +// PutRequest: PutRequest; +// DeleteRequest?: undefined; +// }) +// | (DynamoDbWriteRequest & { +// DeleteRequest: DeleteRequest; +// PutRequest?: undefined; +// }); From 4ee80903962163ccd7152e0be6e25297da09fbeb Mon Sep 17 00:00:00 2001 From: bas-d <7903735+bas-d@users.noreply.github.com> Date: Fri, 3 Dec 2021 17:36:36 -0600 Subject: [PATCH 07/17] Update package.json --- docs/.nojekyll | 1 + docs/assets/css/main.css | 865 --------- docs/assets/css/main.css.map | 7 - docs/assets/highlight.css | 78 + docs/assets/icons.css | 1043 ++++++++++ docs/assets/icons.png | Bin 0 -> 9615 bytes docs/assets/icons@2x.png | Bin 0 -> 28144 bytes docs/assets/images/icons.png | Bin 9487 -> 0 bytes docs/assets/images/icons@2x.png | Bin 27740 -> 0 bytes docs/assets/js/main.js | 5 - docs/assets/js/search.js | 3 - docs/assets/main.js | 52 + docs/assets/search.js | 1 + docs/assets/style.css | 1388 ++++++++++++++ docs/assets/{images => }/widgets.png | Bin docs/assets/{images => }/widgets@2x.png | Bin docs/classes/BatchGet.html | 41 + docs/classes/BatchWrite.html | 48 + docs/globals.html | 143 -- docs/index.html | 366 +--- docs/interfaces/BatchGetOptions.html | 5 + docs/interfaces/BatchState.html | 1 + docs/interfaces/PerTableOptions.html | 1 + docs/interfaces/TableOptions.html | 8 + docs/interfaces/TableState.html | 1 + docs/interfaces/TableThrottlingTracker.html | 1 + .../ThrottledTableConfiguration.html | 1 + docs/modules.html | 3 + .../assets/css/main.css | 865 --------- .../assets/css/main.css.map | 7 - .../assets/images/icons.png | Bin 9487 -> 0 bytes .../assets/images/icons@2x.png | Bin 27740 -> 0 bytes .../assets/images/widgets.png | Bin 480 -> 0 bytes .../assets/images/widgets@2x.png | Bin 855 -> 0 bytes .../assets/js/main.js | 5 - .../assets/js/search.js | 3 - .../classes/binaryset.html | 677 ------- .../classes/marshaller.html | 410 ---- .../classes/numbervalue.html | 414 ---- .../classes/numbervalueset.html | 660 ------- .../classes/objectset.html | 669 ------- .../dynamodb-auto-marshaller/globals.html | 428 ----- .../dynamodb-auto-marshaller/index.html | 312 --- .../interfaces/marshallingoptions.html | 283 --- .../unmarshalledlistattributevalue.html | 1698 ----------------- .../unmarshalledmapattributevalue.html | 207 -- .../assets/css/main.css | 865 --------- .../assets/css/main.css.map | 7 - .../assets/images/icons.png 
| Bin 9487 -> 0 bytes .../assets/images/icons@2x.png | Bin 27740 -> 0 bytes .../assets/images/widgets.png | Bin 480 -> 0 bytes .../assets/images/widgets@2x.png | Bin 855 -> 0 bytes .../dynamodb-batch-iterator/assets/js/main.js | 5 - .../assets/js/search.js | 3 - .../classes/batchget.html | 589 ------ .../classes/batchoperation.html | 591 ------ .../classes/batchwrite.html | 597 ------ .../dynamodb-batch-iterator/globals.html | 349 ---- .../dynamodb-batch-iterator/index.html | 267 --- .../interfaces/batchgetoptions.html | 261 --- .../interfaces/batchstate.html | 226 --- .../interfaces/pertableoptions.html | 210 -- .../interfaces/tableoptions.html | 281 --- .../interfaces/tablestate.html | 328 ---- .../interfaces/tablethrottlingtracker.html | 267 --- .../throttledtableconfiguration.html | 334 ---- .../assets/css/main.css | 865 --------- .../assets/css/main.css.map | 7 - .../assets/images/icons.png | Bin 9487 -> 0 bytes .../assets/images/icons@2x.png | Bin 27740 -> 0 bytes .../assets/images/widgets.png | Bin 480 -> 0 bytes .../assets/images/widgets@2x.png | Bin 855 -> 0 bytes .../assets/js/main.js | 5 - .../assets/js/search.js | 3 - .../classes/author.html | 253 --- .../classes/comment.html | 363 ---- .../classes/post.html | 370 ---- .../globals.html | 439 ----- .../index.html | 321 ---- .../interfaces/classannotation.html | 216 --- .../interfaces/propertyannotation.html | 219 --- .../dynamodb-data-mapper/assets/css/main.css | 865 --------- .../assets/css/main.css.map | 7 - .../assets/images/icons.png | Bin 9487 -> 0 bytes .../assets/images/icons@2x.png | Bin 27740 -> 0 bytes .../assets/images/widgets.png | Bin 480 -> 0 bytes .../assets/images/widgets@2x.png | Bin 855 -> 0 bytes .../dynamodb-data-mapper/assets/js/main.js | 5 - .../dynamodb-data-mapper/assets/js/search.js | 3 - .../classes/datamapper.html | 1246 ------------ .../classes/itemnotfoundexception.html | 318 --- .../classes/iterator.html | 467 ----- .../classes/paginator.html | 444 ----- .../classes/parallelscaniterator.html | 484 ----- .../classes/parallelscanpaginator.html | 484 ----- .../classes/queryiterator.html | 484 ----- .../classes/querypaginator.html | 460 ----- .../classes/scaniterator.html | 481 ----- .../classes/scanpaginator.html | 457 ----- .../dynamodb-data-mapper/globals.html | 905 --------- docs/packages/dynamodb-data-mapper/index.html | 999 ---------- .../interfaces/basescanoptions.html | 292 --- .../interfaces/basesequentialscanoptions.html | 380 ---- .../interfaces/batchgetoptions.html | 223 --- .../interfaces/batchgettableoptions.html | 248 --- .../interfaces/batchstate.html | 175 -- .../interfaces/createtableoptions.html | 241 --- .../interfaces/ctorbearer.html | 206 -- .../interfaces/datamapperconfiguration.html | 257 --- .../interfaces/deleteoptions.html | 243 --- .../interfaces/deleteparameters.html | 281 --- .../interfaces/documenttypeoptions.html | 214 --- .../executeupdateexpressionoptions.html | 198 -- .../interfaces/getoptions.html | 230 --- .../interfaces/getparameters.html | 258 --- .../globalsecondaryindexoptions.html | 245 --- .../interfaces/initializedscanstate.html | 215 --- .../localsecondaryindexoptions.html | 212 -- .../interfaces/parallelscanoptions.html | 327 ---- .../interfaces/parallelscanworkeroptions.html | 360 ---- .../interfaces/perindexoptions.html | 165 -- .../interfaces/provisionedthroughput.html | 214 --- .../interfaces/putoptions.html | 224 --- .../interfaces/putparameters.html | 261 --- .../interfaces/queryoptions.html | 355 ---- .../interfaces/queryparameters.html | 413 ---- 
.../readconsistencyconfiguration.html | 211 -- .../interfaces/scanoptions.html | 360 ---- .../sharedsecondaryindexoptions.html | 200 -- .../interfaces/stringtoanyobjectmap.html | 165 -- .../interfaces/uninitializedscanstate.html | 214 --- .../interfaces/updateoptions.html | 244 --- .../interfaces/updateparameters.html | 274 --- .../assets/css/main.css | 865 --------- .../assets/css/main.css.map | 7 - .../assets/images/icons.png | Bin 9487 -> 0 bytes .../assets/images/icons@2x.png | Bin 27740 -> 0 bytes .../assets/images/widgets.png | Bin 480 -> 0 bytes .../assets/images/widgets@2x.png | Bin 855 -> 0 bytes .../assets/js/main.js | 5 - .../assets/js/search.js | 3 - .../classes/invalidschemaerror.html | 303 --- .../classes/invalidvalueerror.html | 303 --- .../dynamodb-data-marshaller/globals.html | 1175 ------------ .../dynamodb-data-marshaller/index.html | 600 ------ .../interfaces/anytype.html | 330 ---- .../interfaces/attributetypemap.html | 165 -- .../interfaces/basetype.html | 301 --- .../interfaces/binarytype.html | 294 --- .../interfaces/booleantype.html | 249 --- .../interfaces/collectiontype.html | 320 ---- .../interfaces/customtype.html | 417 ---- .../interfaces/datetype.html | 296 --- .../interfaces/documenttype.html | 298 --- .../interfaces/hashtype.html | 320 ---- .../interfaces/keyabletype.html | 241 --- .../interfaces/keyschema.html | 220 --- .../interfaces/keytypemap.html | 165 -- .../interfaces/listtype.html | 286 --- .../interfaces/maptype.html | 280 --- .../interfaces/marshalledexpression.html | 245 --- .../interfaces/nulltype.html | 249 --- .../interfaces/numbertype.html | 309 --- .../interfaces/perindexkeys.html | 165 -- .../interfaces/schema.html | 179 -- .../interfaces/settype.html | 256 --- .../interfaces/stringtype.html | 293 --- .../interfaces/tupletype.html | 272 --- .../interfaces/zeroargumentsconstructor.html | 214 --- .../dynamodb-expressions/assets/css/main.css | 865 --------- .../assets/css/main.css.map | 7 - .../assets/images/icons.png | Bin 9487 -> 0 bytes .../assets/images/icons@2x.png | Bin 27740 -> 0 bytes .../assets/images/widgets.png | Bin 480 -> 0 bytes .../assets/images/widgets@2x.png | Bin 855 -> 0 bytes .../dynamodb-expressions/assets/js/main.js | 5 - .../dynamodb-expressions/assets/js/search.js | 3 - .../classes/attributepath.html | 294 --- .../classes/attributevalue.html | 285 --- .../classes/expressionattributes.html | 303 --- .../classes/functionexpression.html | 339 ---- .../classes/mathematicalexpression.html | 356 ---- .../classes/updateexpression.html | 420 ---- .../dynamodb-expressions/globals.html | 933 --------- docs/packages/dynamodb-expressions/index.html | 725 ------- .../interfaces/andexpression.html | 214 --- .../attributebearingexpression.html | 221 --- .../interfaces/attributeexistspredicate.html | 221 --- .../interfaces/attributename.html | 214 --- .../attributenotexistspredicate.html | 221 --- .../interfaces/attributetypepredicate.html | 234 --- .../basefunctionexpressionpredicate.html | 230 --- .../interfaces/beginswithpredicate.html | 235 --- .../betweenexpressionpredicate.html | 227 --- .../interfaces/binarycomparisonpredicate.html | 217 --- .../conditionexpressionsubject.html | 197 -- .../interfaces/containspredicate.html | 235 --- .../equalityexpressionpredicate.html | 224 --- .../greaterthanexpressionpredicate.html | 224 --- ...eaterthanorequaltoexpressionpredicate.html | 225 --- .../inequalityexpressionpredicate.html | 224 --- .../lessthanexpressionpredicate.html | 224 --- .../lessthanorequaltoexpressionpredicate.html | 225 
--- .../interfaces/listindex.html | 213 --- .../membershipexpressionpredicate.html | 214 --- .../interfaces/notexpression.html | 213 --- .../interfaces/orexpression.html | 214 --- .../assets/css/main.css | 865 --------- .../assets/css/main.css.map | 7 - .../assets/images/icons.png | Bin 9487 -> 0 bytes .../assets/images/icons@2x.png | Bin 27740 -> 0 bytes .../assets/images/widgets.png | Bin 480 -> 0 bytes .../assets/images/widgets@2x.png | Bin 855 -> 0 bytes .../dynamodb-query-iterator/assets/js/main.js | 5 - .../assets/js/search.js | 3 - .../classes/dynamodbpaginator.html | 548 ------ .../classes/itemiterator.html | 490 ----- .../classes/parallelscaniterator.html | 490 ----- .../classes/parallelscanpaginator.html | 487 ----- .../classes/queryiterator.html | 490 ----- .../classes/querypaginator.html | 556 ------ .../classes/scaniterator.html | 490 ----- .../classes/scanpaginator.html | 556 ------ .../dynamodb-query-iterator/globals.html | 310 --- .../dynamodb-query-iterator/index.html | 496 ----- .../dynamodbpaginatorinterface.html | 414 ---- .../interfaces/dynamodbresultspage.html | 337 ---- .../interfaces/initializedscanstate.html | 260 --- .../interfaces/parallelscaninput.html | 550 ------ .../interfaces/uninitializedscanstate.html | 259 --- package.json | 14 +- tsconfig.json | 52 +- 232 files changed, 2759 insertions(+), 59101 deletions(-) create mode 100644 docs/.nojekyll delete mode 100644 docs/assets/css/main.css delete mode 100644 docs/assets/css/main.css.map create mode 100644 docs/assets/highlight.css create mode 100644 docs/assets/icons.css create mode 100644 docs/assets/icons.png create mode 100644 docs/assets/icons@2x.png delete mode 100644 docs/assets/images/icons.png delete mode 100644 docs/assets/images/icons@2x.png delete mode 100644 docs/assets/js/main.js delete mode 100644 docs/assets/js/search.js create mode 100644 docs/assets/main.js create mode 100644 docs/assets/search.js create mode 100644 docs/assets/style.css rename docs/assets/{images => }/widgets.png (100%) rename docs/assets/{images => }/widgets@2x.png (100%) create mode 100644 docs/classes/BatchGet.html create mode 100644 docs/classes/BatchWrite.html delete mode 100644 docs/globals.html create mode 100644 docs/interfaces/BatchGetOptions.html create mode 100644 docs/interfaces/BatchState.html create mode 100644 docs/interfaces/PerTableOptions.html create mode 100644 docs/interfaces/TableOptions.html create mode 100644 docs/interfaces/TableState.html create mode 100644 docs/interfaces/TableThrottlingTracker.html create mode 100644 docs/interfaces/ThrottledTableConfiguration.html create mode 100644 docs/modules.html delete mode 100644 docs/packages/dynamodb-auto-marshaller/assets/css/main.css delete mode 100644 docs/packages/dynamodb-auto-marshaller/assets/css/main.css.map delete mode 100644 docs/packages/dynamodb-auto-marshaller/assets/images/icons.png delete mode 100644 docs/packages/dynamodb-auto-marshaller/assets/images/icons@2x.png delete mode 100644 docs/packages/dynamodb-auto-marshaller/assets/images/widgets.png delete mode 100644 docs/packages/dynamodb-auto-marshaller/assets/images/widgets@2x.png delete mode 100644 docs/packages/dynamodb-auto-marshaller/assets/js/main.js delete mode 100644 docs/packages/dynamodb-auto-marshaller/assets/js/search.js delete mode 100644 docs/packages/dynamodb-auto-marshaller/classes/binaryset.html delete mode 100644 docs/packages/dynamodb-auto-marshaller/classes/marshaller.html delete mode 100644 docs/packages/dynamodb-auto-marshaller/classes/numbervalue.html delete mode 
100644 docs/packages/dynamodb-auto-marshaller/classes/numbervalueset.html delete mode 100644 docs/packages/dynamodb-auto-marshaller/classes/objectset.html delete mode 100644 docs/packages/dynamodb-auto-marshaller/globals.html delete mode 100644 docs/packages/dynamodb-auto-marshaller/index.html delete mode 100644 docs/packages/dynamodb-auto-marshaller/interfaces/marshallingoptions.html delete mode 100644 docs/packages/dynamodb-auto-marshaller/interfaces/unmarshalledlistattributevalue.html delete mode 100644 docs/packages/dynamodb-auto-marshaller/interfaces/unmarshalledmapattributevalue.html delete mode 100644 docs/packages/dynamodb-batch-iterator/assets/css/main.css delete mode 100644 docs/packages/dynamodb-batch-iterator/assets/css/main.css.map delete mode 100644 docs/packages/dynamodb-batch-iterator/assets/images/icons.png delete mode 100644 docs/packages/dynamodb-batch-iterator/assets/images/icons@2x.png delete mode 100644 docs/packages/dynamodb-batch-iterator/assets/images/widgets.png delete mode 100644 docs/packages/dynamodb-batch-iterator/assets/images/widgets@2x.png delete mode 100644 docs/packages/dynamodb-batch-iterator/assets/js/main.js delete mode 100644 docs/packages/dynamodb-batch-iterator/assets/js/search.js delete mode 100644 docs/packages/dynamodb-batch-iterator/classes/batchget.html delete mode 100644 docs/packages/dynamodb-batch-iterator/classes/batchoperation.html delete mode 100644 docs/packages/dynamodb-batch-iterator/classes/batchwrite.html delete mode 100644 docs/packages/dynamodb-batch-iterator/globals.html delete mode 100644 docs/packages/dynamodb-batch-iterator/index.html delete mode 100644 docs/packages/dynamodb-batch-iterator/interfaces/batchgetoptions.html delete mode 100644 docs/packages/dynamodb-batch-iterator/interfaces/batchstate.html delete mode 100644 docs/packages/dynamodb-batch-iterator/interfaces/pertableoptions.html delete mode 100644 docs/packages/dynamodb-batch-iterator/interfaces/tableoptions.html delete mode 100644 docs/packages/dynamodb-batch-iterator/interfaces/tablestate.html delete mode 100644 docs/packages/dynamodb-batch-iterator/interfaces/tablethrottlingtracker.html delete mode 100644 docs/packages/dynamodb-batch-iterator/interfaces/throttledtableconfiguration.html delete mode 100644 docs/packages/dynamodb-data-mapper-annotations/assets/css/main.css delete mode 100644 docs/packages/dynamodb-data-mapper-annotations/assets/css/main.css.map delete mode 100644 docs/packages/dynamodb-data-mapper-annotations/assets/images/icons.png delete mode 100644 docs/packages/dynamodb-data-mapper-annotations/assets/images/icons@2x.png delete mode 100644 docs/packages/dynamodb-data-mapper-annotations/assets/images/widgets.png delete mode 100644 docs/packages/dynamodb-data-mapper-annotations/assets/images/widgets@2x.png delete mode 100644 docs/packages/dynamodb-data-mapper-annotations/assets/js/main.js delete mode 100644 docs/packages/dynamodb-data-mapper-annotations/assets/js/search.js delete mode 100644 docs/packages/dynamodb-data-mapper-annotations/classes/author.html delete mode 100644 docs/packages/dynamodb-data-mapper-annotations/classes/comment.html delete mode 100644 docs/packages/dynamodb-data-mapper-annotations/classes/post.html delete mode 100644 docs/packages/dynamodb-data-mapper-annotations/globals.html delete mode 100644 docs/packages/dynamodb-data-mapper-annotations/index.html delete mode 100644 docs/packages/dynamodb-data-mapper-annotations/interfaces/classannotation.html delete mode 100644 
docs/packages/dynamodb-data-mapper-annotations/interfaces/propertyannotation.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/assets/css/main.css
 delete mode 100644 docs/packages/dynamodb-data-mapper/assets/css/main.css.map
 delete mode 100644 docs/packages/dynamodb-data-mapper/assets/images/icons.png
 delete mode 100644 docs/packages/dynamodb-data-mapper/assets/images/icons@2x.png
 delete mode 100644 docs/packages/dynamodb-data-mapper/assets/images/widgets.png
 delete mode 100644 docs/packages/dynamodb-data-mapper/assets/images/widgets@2x.png
 delete mode 100644 docs/packages/dynamodb-data-mapper/assets/js/main.js
 delete mode 100644 docs/packages/dynamodb-data-mapper/assets/js/search.js
 delete mode 100644 docs/packages/dynamodb-data-mapper/classes/datamapper.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/classes/itemnotfoundexception.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/classes/iterator.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/classes/paginator.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/classes/parallelscaniterator.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/classes/parallelscanpaginator.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/classes/queryiterator.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/classes/querypaginator.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/classes/scaniterator.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/classes/scanpaginator.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/globals.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/index.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/basescanoptions.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/basesequentialscanoptions.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/batchgetoptions.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/batchgettableoptions.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/batchstate.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/createtableoptions.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/ctorbearer.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/datamapperconfiguration.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/deleteoptions.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/deleteparameters.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/documenttypeoptions.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/executeupdateexpressionoptions.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/getoptions.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/getparameters.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/globalsecondaryindexoptions.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/initializedscanstate.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/localsecondaryindexoptions.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/parallelscanoptions.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/parallelscanworkeroptions.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/perindexoptions.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/provisionedthroughput.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/putoptions.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/putparameters.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/queryoptions.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/queryparameters.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/readconsistencyconfiguration.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/scanoptions.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/sharedsecondaryindexoptions.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/stringtoanyobjectmap.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/uninitializedscanstate.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/updateoptions.html
 delete mode 100644 docs/packages/dynamodb-data-mapper/interfaces/updateparameters.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/assets/css/main.css
 delete mode 100644 docs/packages/dynamodb-data-marshaller/assets/css/main.css.map
 delete mode 100644 docs/packages/dynamodb-data-marshaller/assets/images/icons.png
 delete mode 100644 docs/packages/dynamodb-data-marshaller/assets/images/icons@2x.png
 delete mode 100644 docs/packages/dynamodb-data-marshaller/assets/images/widgets.png
 delete mode 100644 docs/packages/dynamodb-data-marshaller/assets/images/widgets@2x.png
 delete mode 100644 docs/packages/dynamodb-data-marshaller/assets/js/main.js
 delete mode 100644 docs/packages/dynamodb-data-marshaller/assets/js/search.js
 delete mode 100644 docs/packages/dynamodb-data-marshaller/classes/invalidschemaerror.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/classes/invalidvalueerror.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/globals.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/index.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/anytype.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/attributetypemap.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/basetype.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/binarytype.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/booleantype.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/collectiontype.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/customtype.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/datetype.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/documenttype.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/hashtype.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/keyabletype.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/keyschema.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/keytypemap.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/listtype.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/maptype.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/marshalledexpression.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/nulltype.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/numbertype.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/perindexkeys.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/schema.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/settype.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/stringtype.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/tupletype.html
 delete mode 100644 docs/packages/dynamodb-data-marshaller/interfaces/zeroargumentsconstructor.html
 delete mode 100644 docs/packages/dynamodb-expressions/assets/css/main.css
 delete mode 100644 docs/packages/dynamodb-expressions/assets/css/main.css.map
 delete mode 100644 docs/packages/dynamodb-expressions/assets/images/icons.png
 delete mode 100644 docs/packages/dynamodb-expressions/assets/images/icons@2x.png
 delete mode 100644 docs/packages/dynamodb-expressions/assets/images/widgets.png
 delete mode 100644 docs/packages/dynamodb-expressions/assets/images/widgets@2x.png
 delete mode 100644 docs/packages/dynamodb-expressions/assets/js/main.js
 delete mode 100644 docs/packages/dynamodb-expressions/assets/js/search.js
 delete mode 100644 docs/packages/dynamodb-expressions/classes/attributepath.html
 delete mode 100644 docs/packages/dynamodb-expressions/classes/attributevalue.html
 delete mode 100644 docs/packages/dynamodb-expressions/classes/expressionattributes.html
 delete mode 100644 docs/packages/dynamodb-expressions/classes/functionexpression.html
 delete mode 100644 docs/packages/dynamodb-expressions/classes/mathematicalexpression.html
 delete mode 100644 docs/packages/dynamodb-expressions/classes/updateexpression.html
 delete mode 100644 docs/packages/dynamodb-expressions/globals.html
 delete mode 100644 docs/packages/dynamodb-expressions/index.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/andexpression.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/attributebearingexpression.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/attributeexistspredicate.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/attributename.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/attributenotexistspredicate.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/attributetypepredicate.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/basefunctionexpressionpredicate.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/beginswithpredicate.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/betweenexpressionpredicate.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/binarycomparisonpredicate.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/conditionexpressionsubject.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/containspredicate.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/equalityexpressionpredicate.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/greaterthanexpressionpredicate.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/greaterthanorequaltoexpressionpredicate.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/inequalityexpressionpredicate.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/lessthanexpressionpredicate.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/lessthanorequaltoexpressionpredicate.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/listindex.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/membershipexpressionpredicate.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/notexpression.html
 delete mode 100644 docs/packages/dynamodb-expressions/interfaces/orexpression.html
 delete mode 100644 docs/packages/dynamodb-query-iterator/assets/css/main.css
 delete mode 100644 docs/packages/dynamodb-query-iterator/assets/css/main.css.map
 delete mode 100644 docs/packages/dynamodb-query-iterator/assets/images/icons.png
 delete mode 100644 docs/packages/dynamodb-query-iterator/assets/images/icons@2x.png
 delete mode 100644 docs/packages/dynamodb-query-iterator/assets/images/widgets.png
 delete mode 100644 docs/packages/dynamodb-query-iterator/assets/images/widgets@2x.png
 delete mode 100644 docs/packages/dynamodb-query-iterator/assets/js/main.js
 delete mode 100644 docs/packages/dynamodb-query-iterator/assets/js/search.js
 delete mode 100644 docs/packages/dynamodb-query-iterator/classes/dynamodbpaginator.html
 delete mode 100644 docs/packages/dynamodb-query-iterator/classes/itemiterator.html
 delete mode 100644 docs/packages/dynamodb-query-iterator/classes/parallelscaniterator.html
 delete mode 100644 docs/packages/dynamodb-query-iterator/classes/parallelscanpaginator.html
 delete mode 100644 docs/packages/dynamodb-query-iterator/classes/queryiterator.html
 delete mode 100644 docs/packages/dynamodb-query-iterator/classes/querypaginator.html
 delete mode 100644 docs/packages/dynamodb-query-iterator/classes/scaniterator.html
 delete mode 100644 docs/packages/dynamodb-query-iterator/classes/scanpaginator.html
 delete mode 100644 docs/packages/dynamodb-query-iterator/globals.html
 delete mode 100644 docs/packages/dynamodb-query-iterator/index.html
 delete mode 100644 docs/packages/dynamodb-query-iterator/interfaces/dynamodbpaginatorinterface.html
 delete mode 100644 docs/packages/dynamodb-query-iterator/interfaces/dynamodbresultspage.html
 delete mode 100644 docs/packages/dynamodb-query-iterator/interfaces/initializedscanstate.html
 delete mode 100644 docs/packages/dynamodb-query-iterator/interfaces/parallelscaninput.html
 delete mode 100644 docs/packages/dynamodb-query-iterator/interfaces/uninitializedscanstate.html
diff --git a/docs/.nojekyll b/docs/.nojekyll
new file mode 100644
index 00000000..e2ac6616
--- /dev/null
+++ b/docs/.nojekyll
@@ -0,0 +1 @@
+TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false.
\ No newline at end of file
diff --git a/docs/assets/css/main.css b/docs/assets/css/main.css
deleted file mode 100644
index 48b3645c..00000000
--- a/docs/assets/css/main.css
+++ /dev/null
@@ -1,865 +0,0 @@
-/*! normalize.css v1.1.3 | MIT License | git.io/normalize */ -/* ========================================================================== HTML5 display definitions ========================================================================== */ -/** Correct `block` display not defined in IE 6/7/8/9 and Firefox 3. */ -article, aside, details, figcaption, figure, footer, header, hgroup, main, nav, section, summary { display: block; } - -/** Correct `inline-block` display not defined in IE 6/7/8/9 and Firefox 3. */ -audio, canvas, video { display: inline-block; *display: inline; *zoom: 1; } - -/** Prevent modern browsers from displaying `audio` without controls. Remove excess height in iOS 5 devices. */ -audio:not([controls]) { display: none; height: 0; } - -/** Address styling not present in IE 7/8/9, Firefox 3, and Safari 4. Known issue: no IE 6 support. 
*/ -[hidden] { display: none; } - -/* ========================================================================== Base ========================================================================== */ -/** 1. Correct text resizing oddly in IE 6/7 when body `font-size` is set using `em` units. 2. Prevent iOS text size adjust after orientation change, without disabling user zoom. */ -html { font-size: 100%; /* 1 */ -ms-text-size-adjust: 100%; /* 2 */ -webkit-text-size-adjust: 100%; /* 2 */ font-family: sans-serif; } - -/** Address `font-family` inconsistency between `textarea` and other form elements. */ -button, input, select, textarea { font-family: sans-serif; } - -/** Address margins handled incorrectly in IE 6/7. */ -body { margin: 0; } - -/* ========================================================================== Links ========================================================================== */ -/** Address `outline` inconsistency between Chrome and other browsers. */ -a:focus { outline: thin dotted; } -a:active, a:hover { outline: 0; } - -/** Improve readability when focused and also mouse hovered in all browsers. */ -/* ========================================================================== Typography ========================================================================== */ -/** Address font sizes and margins set differently in IE 6/7. Address font sizes within `section` and `article` in Firefox 4+, Safari 5, and Chrome. */ -h1 { font-size: 2em; margin: 0.67em 0; } - -h2 { font-size: 1.5em; margin: 0.83em 0; } - -h3 { font-size: 1.17em; margin: 1em 0; } - -h4, .tsd-index-panel h3 { font-size: 1em; margin: 1.33em 0; } - -h5 { font-size: 0.83em; margin: 1.67em 0; } - -h6 { font-size: 0.67em; margin: 2.33em 0; } - -/** Address styling not present in IE 7/8/9, Safari 5, and Chrome. */ -abbr[title] { border-bottom: 1px dotted; } - -/** Address style set to `bolder` in Firefox 3+, Safari 4/5, and Chrome. */ -b, strong { font-weight: bold; } - -blockquote { margin: 1em 40px; } - -/** Address styling not present in Safari 5 and Chrome. */ -dfn { font-style: italic; } - -/** Address differences between Firefox and other browsers. Known issue: no IE 6/7 normalization. */ -hr { box-sizing: content-box; height: 0; } - -/** Address styling not present in IE 6/7/8/9. */ -mark { background: #ff0; color: #000; } - -/** Address margins set differently in IE 6/7. */ -p, pre { margin: 1em 0; } - -/** Correct font family set oddly in IE 6, Safari 4/5, and Chrome. */ -code, kbd, pre, samp { font-family: monospace, serif; _font-family: "courier new", monospace; font-size: 1em; } - -/** Improve readability of pre-formatted text in all browsers. */ -pre { white-space: pre; white-space: pre-wrap; word-wrap: break-word; } - -/** Address CSS quotes not supported in IE 6/7. */ -q { quotes: none; } -q:before, q:after { content: ""; content: none; } - -/** Address `quotes` property not supported in Safari 4. */ -/** Address inconsistent and variable font size in all browsers. */ -small { font-size: 80%; } - -/** Prevent `sub` and `sup` affecting `line-height` in all browsers. */ -sub { font-size: 75%; line-height: 0; position: relative; vertical-align: baseline; } - -sup { font-size: 75%; line-height: 0; position: relative; vertical-align: baseline; top: -0.5em; } - -sub { bottom: -0.25em; } - -/* ========================================================================== Lists ========================================================================== */ -/** Address margins set differently in IE 6/7. 
*/ -dl, menu, ol, ul { margin: 1em 0; } - -dd { margin: 0 0 0 40px; } - -/** Address paddings set differently in IE 6/7. */ -menu, ol, ul { padding: 0 0 0 40px; } - -/** Correct list images handled incorrectly in IE 7. */ -nav ul, nav ol { list-style: none; list-style-image: none; } - -/* ========================================================================== Embedded content ========================================================================== */ -/** 1. Remove border when inside `a` element in IE 6/7/8/9 and Firefox 3. 2. Improve image quality when scaled in IE 7. */ -img { border: 0; /* 1 */ -ms-interpolation-mode: bicubic; } - -/* 2 */ -/** Correct overflow displayed oddly in IE 9. */ -svg:not(:root) { overflow: hidden; } - -/* ========================================================================== Figures ========================================================================== */ -/** Address margin not present in IE 6/7/8/9, Safari 5, and Opera 11. */ -figure, form { margin: 0; } - -/* ========================================================================== Forms ========================================================================== */ -/** Correct margin displayed oddly in IE 6/7. */ -/** Define consistent border, margin, and padding. */ -fieldset { border: 1px solid #c0c0c0; margin: 0 2px; padding: 0.35em 0.625em 0.75em; } - -/** 1. Correct color not being inherited in IE 6/7/8/9. 2. Correct text not wrapping in Firefox 3. 3. Correct alignment displayed oddly in IE 6/7. */ -legend { border: 0; /* 1 */ padding: 0; white-space: normal; /* 2 */ *margin-left: -7px; } - -/* 3 */ -/** 1. Correct font size not being inherited in all browsers. 2. Address margins set differently in IE 6/7, Firefox 3+, Safari 5, and Chrome. 3. Improve appearance and consistency in all browsers. */ -button, input, select, textarea { font-size: 100%; /* 1 */ margin: 0; /* 2 */ vertical-align: baseline; /* 3 */ *vertical-align: middle; } - -/* 3 */ -/** Address Firefox 3+ setting `line-height` on `input` using `!important` in the UA stylesheet. */ -button, input { line-height: normal; } - -/** Address inconsistent `text-transform` inheritance for `button` and `select`. All other form control elements do not inherit `text-transform` values. Correct `button` style inheritance in Chrome, Safari 5+, and IE 6+. Correct `select` style inheritance in Firefox 4+ and Opera. */ -button, select { text-transform: none; } - -/** 1. Avoid the WebKit bug in Android 4.0.* where (2) destroys native `audio` and `video` controls. 2. Correct inability to style clickable `input` types in iOS. 3. Improve usability and consistency of cursor style between image-type `input` and others. 4. Remove inner spacing in IE 7 without affecting normal text inputs. Known issue: inner spacing remains in IE 6. */ -button, html input[type="button"] { -webkit-appearance: button; /* 2 */ cursor: pointer; /* 3 */ *overflow: visible; } - -/* 4 */ -input[type="reset"], input[type="submit"] { -webkit-appearance: button; /* 2 */ cursor: pointer; /* 3 */ *overflow: visible; } - -/* 4 */ -/** Re-set default cursor for disabled elements. */ -button[disabled], html input[disabled] { cursor: default; } - -/** 1. Address box sizing set to content-box in IE 8/9. 2. Remove excess padding in IE 8/9. 3. Remove excess padding in IE 7. Known issue: excess padding remains in IE 6. 
*/ -input { /* 3 */ } -input[type="checkbox"], input[type="radio"] { box-sizing: border-box; /* 1 */ padding: 0; /* 2 */ *height: 13px; /* 3 */ *width: 13px; } -input[type="search"] { -webkit-appearance: textfield; /* 1 */ /* 2 */ box-sizing: content-box; } -input[type="search"]::-webkit-search-cancel-button, input[type="search"]::-webkit-search-decoration { -webkit-appearance: none; } - -/** 1. Address `appearance` set to `searchfield` in Safari 5 and Chrome. 2. Address `box-sizing` set to `border-box` in Safari 5 and Chrome (include `-moz` to future-proof). */ -/** Remove inner padding and search cancel button in Safari 5 and Chrome on OS X. */ -/** Remove inner padding and border in Firefox 3+. */ -button::-moz-focus-inner, input::-moz-focus-inner { border: 0; padding: 0; } - -/** 1. Remove default vertical scrollbar in IE 6/7/8/9. 2. Improve readability and alignment in all browsers. */ -textarea { overflow: auto; /* 1 */ vertical-align: top; } - -/* 2 */ -/* ========================================================================== Tables ========================================================================== */ -/** Remove most spacing between table cells. */ -table { border-collapse: collapse; border-spacing: 0; } - -/* Visual Studio-like style based on original C# coloring by Jason Diamond */ -.hljs { display: inline-block; padding: 0.5em; background: white; color: black; } - -.hljs-comment, .hljs-annotation, .hljs-template_comment, .diff .hljs-header, .hljs-chunk, .apache .hljs-cbracket { color: #008000; } - -.hljs-keyword, .hljs-id, .hljs-built_in, .css .smalltalk .hljs-class, .hljs-winutils, .bash .hljs-variable, .tex .hljs-command, .hljs-request, .hljs-status, .nginx .hljs-title { color: #00f; } - -.xml .hljs-tag { color: #00f; } -.xml .hljs-tag .hljs-value { color: #00f; } - -.hljs-string, .hljs-title, .hljs-parent, .hljs-tag .hljs-value, .hljs-rules .hljs-value { color: #a31515; } - -.ruby .hljs-symbol { color: #a31515; } -.ruby .hljs-symbol .hljs-string { color: #a31515; } - -.hljs-template_tag, .django .hljs-variable, .hljs-addition, .hljs-flow, .hljs-stream, .apache .hljs-tag, .hljs-date, .tex .hljs-formula, .coffeescript .hljs-attribute { color: #a31515; } - -.ruby .hljs-string, .hljs-decorator, .hljs-filter .hljs-argument, .hljs-localvars, .hljs-array, .hljs-attr_selector, .hljs-pseudo, .hljs-pi, .hljs-doctype, .hljs-deletion, .hljs-envvar, .hljs-shebang, .hljs-preprocessor, .hljs-pragma, .userType, .apache .hljs-sqbracket, .nginx .hljs-built_in, .tex .hljs-special, .hljs-prompt { color: #2b91af; } - -.hljs-phpdoc, .hljs-javadoc, .hljs-xmlDocTag { color: #808080; } - -.vhdl .hljs-typename { font-weight: bold; } -.vhdl .hljs-string { color: #666666; } -.vhdl .hljs-literal { color: #a31515; } -.vhdl .hljs-attribute { color: #00b0e8; } - -.xml .hljs-attribute { color: #f00; } - -.col > :first-child, .col-1 > :first-child, .col-2 > :first-child, .col-3 > :first-child, .col-4 > :first-child, .col-5 > :first-child, .col-6 > :first-child, .col-7 > :first-child, .col-8 > :first-child, .col-9 > :first-child, .col-10 > :first-child, .col-11 > :first-child, .tsd-panel > :first-child, ul.tsd-descriptions > li > :first-child, .col > :first-child > :first-child, .col-1 > :first-child > :first-child, .col-2 > :first-child > :first-child, .col-3 > :first-child > :first-child, .col-4 > :first-child > :first-child, .col-5 > :first-child > :first-child, .col-6 > :first-child > :first-child, .col-7 > :first-child > :first-child, .col-8 > :first-child > :first-child, .col-9 > :first-child 
> :first-child, .col-10 > :first-child > :first-child, .col-11 > :first-child > :first-child, .tsd-panel > :first-child > :first-child, ul.tsd-descriptions > li > :first-child > :first-child, .col > :first-child > :first-child > :first-child, .col-1 > :first-child > :first-child > :first-child, .col-2 > :first-child > :first-child > :first-child, .col-3 > :first-child > :first-child > :first-child, .col-4 > :first-child > :first-child > :first-child, .col-5 > :first-child > :first-child > :first-child, .col-6 > :first-child > :first-child > :first-child, .col-7 > :first-child > :first-child > :first-child, .col-8 > :first-child > :first-child > :first-child, .col-9 > :first-child > :first-child > :first-child, .col-10 > :first-child > :first-child > :first-child, .col-11 > :first-child > :first-child > :first-child, .tsd-panel > :first-child > :first-child > :first-child, ul.tsd-descriptions > li > :first-child > :first-child > :first-child { margin-top: 0; } -.col > :last-child, .col-1 > :last-child, .col-2 > :last-child, .col-3 > :last-child, .col-4 > :last-child, .col-5 > :last-child, .col-6 > :last-child, .col-7 > :last-child, .col-8 > :last-child, .col-9 > :last-child, .col-10 > :last-child, .col-11 > :last-child, .tsd-panel > :last-child, ul.tsd-descriptions > li > :last-child, .col > :last-child > :last-child, .col-1 > :last-child > :last-child, .col-2 > :last-child > :last-child, .col-3 > :last-child > :last-child, .col-4 > :last-child > :last-child, .col-5 > :last-child > :last-child, .col-6 > :last-child > :last-child, .col-7 > :last-child > :last-child, .col-8 > :last-child > :last-child, .col-9 > :last-child > :last-child, .col-10 > :last-child > :last-child, .col-11 > :last-child > :last-child, .tsd-panel > :last-child > :last-child, ul.tsd-descriptions > li > :last-child > :last-child, .col > :last-child > :last-child > :last-child, .col-1 > :last-child > :last-child > :last-child, .col-2 > :last-child > :last-child > :last-child, .col-3 > :last-child > :last-child > :last-child, .col-4 > :last-child > :last-child > :last-child, .col-5 > :last-child > :last-child > :last-child, .col-6 > :last-child > :last-child > :last-child, .col-7 > :last-child > :last-child > :last-child, .col-8 > :last-child > :last-child > :last-child, .col-9 > :last-child > :last-child > :last-child, .col-10 > :last-child > :last-child > :last-child, .col-11 > :last-child > :last-child > :last-child, .tsd-panel > :last-child > :last-child > :last-child, ul.tsd-descriptions > li > :last-child > :last-child > :last-child { margin-bottom: 0; } - -.container { max-width: 1200px; margin: 0 auto; padding: 0 40px; } -@media (max-width: 640px) { .container { padding: 0 20px; } } - -.container-main { padding-bottom: 200px; } - -.row { position: relative; margin: 0 -10px; } -.row:after { visibility: hidden; display: block; content: ""; clear: both; height: 0; } - -.col, .col-1, .col-2, .col-3, .col-4, .col-5, .col-6, .col-7, .col-8, .col-9, .col-10, .col-11 { box-sizing: border-box; float: left; padding: 0 10px; } - -.col-1 { width: 8.33333%; } - -.offset-1 { margin-left: 8.33333%; } - -.col-2 { width: 16.66667%; } - -.offset-2 { margin-left: 16.66667%; } - -.col-3 { width: 25%; } - -.offset-3 { margin-left: 25%; } - -.col-4 { width: 33.33333%; } - -.offset-4 { margin-left: 33.33333%; } - -.col-5 { width: 41.66667%; } - -.offset-5 { margin-left: 41.66667%; } - -.col-6 { width: 50%; } - -.offset-6 { margin-left: 50%; } - -.col-7 { width: 58.33333%; } - -.offset-7 { margin-left: 58.33333%; } - -.col-8 { width: 
66.66667%; } - -.offset-8 { margin-left: 66.66667%; } - -.col-9 { width: 75%; } - -.offset-9 { margin-left: 75%; } - -.col-10 { width: 83.33333%; } - -.offset-10 { margin-left: 83.33333%; } - -.col-11 { width: 91.66667%; } - -.offset-11 { margin-left: 91.66667%; } - -.tsd-kind-icon { display: block; position: relative; padding-left: 20px; text-indent: -20px; } -.tsd-kind-icon:before { content: ''; display: inline-block; vertical-align: middle; width: 17px; height: 17px; margin: 0 3px 2px 0; background-image: url(../images/icons.png); } -@media (-webkit-min-device-pixel-ratio: 1.5), (min-device-pixel-ratio: 1.5), (min-resolution: 144dpi) { .tsd-kind-icon:before { background-image: url(../images/icons@2x.png); background-size: 238px 204px; } } - -.tsd-signature.tsd-kind-icon:before { background-position: 0 -153px; } - -.tsd-kind-object-literal > .tsd-kind-icon:before { background-position: 0px -17px; } -.tsd-kind-object-literal.tsd-is-protected > .tsd-kind-icon:before { background-position: -17px -17px; } -.tsd-kind-object-literal.tsd-is-private > .tsd-kind-icon:before { background-position: -34px -17px; } - -.tsd-kind-class > .tsd-kind-icon:before { background-position: 0px -34px; } -.tsd-kind-class.tsd-is-protected > .tsd-kind-icon:before { background-position: -17px -34px; } -.tsd-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -34px -34px; } - -.tsd-kind-class.tsd-has-type-parameter > .tsd-kind-icon:before { background-position: 0px -51px; } -.tsd-kind-class.tsd-has-type-parameter.tsd-is-protected > .tsd-kind-icon:before { background-position: -17px -51px; } -.tsd-kind-class.tsd-has-type-parameter.tsd-is-private > .tsd-kind-icon:before { background-position: -34px -51px; } - -.tsd-kind-interface > .tsd-kind-icon:before { background-position: 0px -68px; } -.tsd-kind-interface.tsd-is-protected > .tsd-kind-icon:before { background-position: -17px -68px; } -.tsd-kind-interface.tsd-is-private > .tsd-kind-icon:before { background-position: -34px -68px; } - -.tsd-kind-interface.tsd-has-type-parameter > .tsd-kind-icon:before { background-position: 0px -85px; } -.tsd-kind-interface.tsd-has-type-parameter.tsd-is-protected > .tsd-kind-icon:before { background-position: -17px -85px; } -.tsd-kind-interface.tsd-has-type-parameter.tsd-is-private > .tsd-kind-icon:before { background-position: -34px -85px; } - -.tsd-kind-module > .tsd-kind-icon:before { background-position: 0px -102px; } -.tsd-kind-module.tsd-is-protected > .tsd-kind-icon:before { background-position: -17px -102px; } -.tsd-kind-module.tsd-is-private > .tsd-kind-icon:before { background-position: -34px -102px; } - -.tsd-kind-external-module > .tsd-kind-icon:before { background-position: 0px -102px; } -.tsd-kind-external-module.tsd-is-protected > .tsd-kind-icon:before { background-position: -17px -102px; } -.tsd-kind-external-module.tsd-is-private > .tsd-kind-icon:before { background-position: -34px -102px; } - -.tsd-kind-enum > .tsd-kind-icon:before { background-position: 0px -119px; } -.tsd-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -17px -119px; } -.tsd-kind-enum.tsd-is-private > .tsd-kind-icon:before { background-position: -34px -119px; } - -.tsd-kind-enum-member > .tsd-kind-icon:before { background-position: 0px -136px; } -.tsd-kind-enum-member.tsd-is-protected > .tsd-kind-icon:before { background-position: -17px -136px; } -.tsd-kind-enum-member.tsd-is-private > .tsd-kind-icon:before { background-position: -34px -136px; } - -.tsd-kind-signature > .tsd-kind-icon:before { 
background-position: 0px -153px; } -.tsd-kind-signature.tsd-is-protected > .tsd-kind-icon:before { background-position: -17px -153px; } -.tsd-kind-signature.tsd-is-private > .tsd-kind-icon:before { background-position: -34px -153px; } - -.tsd-kind-type-alias > .tsd-kind-icon:before { background-position: 0px -170px; } -.tsd-kind-type-alias.tsd-is-protected > .tsd-kind-icon:before { background-position: -17px -170px; } -.tsd-kind-type-alias.tsd-is-private > .tsd-kind-icon:before { background-position: -34px -170px; } - -.tsd-kind-variable > .tsd-kind-icon:before { background-position: -136px -0px; } -.tsd-kind-variable.tsd-is-protected > .tsd-kind-icon:before { background-position: -153px -0px; } -.tsd-kind-variable.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -0px; } -.tsd-kind-variable.tsd-parent-kind-class > .tsd-kind-icon:before { background-position: -51px -0px; } -.tsd-kind-variable.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { background-position: -68px -0px; } -.tsd-kind-variable.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { background-position: -85px -0px; } -.tsd-kind-variable.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited > .tsd-kind-icon:before { background-position: -102px -0px; } -.tsd-kind-variable.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -0px; } -.tsd-kind-variable.tsd-parent-kind-enum > .tsd-kind-icon:before { background-position: -170px -0px; } -.tsd-kind-variable.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -187px -0px; } -.tsd-kind-variable.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -0px; } -.tsd-kind-variable.tsd-parent-kind-interface > .tsd-kind-icon:before { background-position: -204px -0px; } -.tsd-kind-variable.tsd-parent-kind-interface.tsd-is-inherited > .tsd-kind-icon:before { background-position: -221px -0px; } - -.tsd-kind-property > .tsd-kind-icon:before { background-position: -136px -0px; } -.tsd-kind-property.tsd-is-protected > .tsd-kind-icon:before { background-position: -153px -0px; } -.tsd-kind-property.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -0px; } -.tsd-kind-property.tsd-parent-kind-class > .tsd-kind-icon:before { background-position: -51px -0px; } -.tsd-kind-property.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { background-position: -68px -0px; } -.tsd-kind-property.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { background-position: -85px -0px; } -.tsd-kind-property.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited > .tsd-kind-icon:before { background-position: -102px -0px; } -.tsd-kind-property.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -0px; } -.tsd-kind-property.tsd-parent-kind-enum > .tsd-kind-icon:before { background-position: -170px -0px; } -.tsd-kind-property.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -187px -0px; } -.tsd-kind-property.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -0px; } -.tsd-kind-property.tsd-parent-kind-interface > .tsd-kind-icon:before { background-position: -204px -0px; } -.tsd-kind-property.tsd-parent-kind-interface.tsd-is-inherited > .tsd-kind-icon:before { background-position: -221px -0px; } - -.tsd-kind-get-signature > .tsd-kind-icon:before { background-position: -136px -17px; } 
-.tsd-kind-get-signature.tsd-is-protected > .tsd-kind-icon:before { background-position: -153px -17px; } -.tsd-kind-get-signature.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -17px; } -.tsd-kind-get-signature.tsd-parent-kind-class > .tsd-kind-icon:before { background-position: -51px -17px; } -.tsd-kind-get-signature.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { background-position: -68px -17px; } -.tsd-kind-get-signature.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { background-position: -85px -17px; } -.tsd-kind-get-signature.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited > .tsd-kind-icon:before { background-position: -102px -17px; } -.tsd-kind-get-signature.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -17px; } -.tsd-kind-get-signature.tsd-parent-kind-enum > .tsd-kind-icon:before { background-position: -170px -17px; } -.tsd-kind-get-signature.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -187px -17px; } -.tsd-kind-get-signature.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -17px; } -.tsd-kind-get-signature.tsd-parent-kind-interface > .tsd-kind-icon:before { background-position: -204px -17px; } -.tsd-kind-get-signature.tsd-parent-kind-interface.tsd-is-inherited > .tsd-kind-icon:before { background-position: -221px -17px; } - -.tsd-kind-set-signature > .tsd-kind-icon:before { background-position: -136px -34px; } -.tsd-kind-set-signature.tsd-is-protected > .tsd-kind-icon:before { background-position: -153px -34px; } -.tsd-kind-set-signature.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -34px; } -.tsd-kind-set-signature.tsd-parent-kind-class > .tsd-kind-icon:before { background-position: -51px -34px; } -.tsd-kind-set-signature.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { background-position: -68px -34px; } -.tsd-kind-set-signature.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { background-position: -85px -34px; } -.tsd-kind-set-signature.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited > .tsd-kind-icon:before { background-position: -102px -34px; } -.tsd-kind-set-signature.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -34px; } -.tsd-kind-set-signature.tsd-parent-kind-enum > .tsd-kind-icon:before { background-position: -170px -34px; } -.tsd-kind-set-signature.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -187px -34px; } -.tsd-kind-set-signature.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -34px; } -.tsd-kind-set-signature.tsd-parent-kind-interface > .tsd-kind-icon:before { background-position: -204px -34px; } -.tsd-kind-set-signature.tsd-parent-kind-interface.tsd-is-inherited > .tsd-kind-icon:before { background-position: -221px -34px; } - -.tsd-kind-accessor > .tsd-kind-icon:before { background-position: -136px -51px; } -.tsd-kind-accessor.tsd-is-protected > .tsd-kind-icon:before { background-position: -153px -51px; } -.tsd-kind-accessor.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -51px; } -.tsd-kind-accessor.tsd-parent-kind-class > .tsd-kind-icon:before { background-position: -51px -51px; } -.tsd-kind-accessor.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { background-position: -68px -51px; } -.tsd-kind-accessor.tsd-parent-kind-class.tsd-is-protected > 
.tsd-kind-icon:before { background-position: -85px -51px; } -.tsd-kind-accessor.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited > .tsd-kind-icon:before { background-position: -102px -51px; } -.tsd-kind-accessor.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -51px; } -.tsd-kind-accessor.tsd-parent-kind-enum > .tsd-kind-icon:before { background-position: -170px -51px; } -.tsd-kind-accessor.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -187px -51px; } -.tsd-kind-accessor.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -51px; } -.tsd-kind-accessor.tsd-parent-kind-interface > .tsd-kind-icon:before { background-position: -204px -51px; } -.tsd-kind-accessor.tsd-parent-kind-interface.tsd-is-inherited > .tsd-kind-icon:before { background-position: -221px -51px; } - -.tsd-kind-function > .tsd-kind-icon:before { background-position: -136px -68px; } -.tsd-kind-function.tsd-is-protected > .tsd-kind-icon:before { background-position: -153px -68px; } -.tsd-kind-function.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -68px; } -.tsd-kind-function.tsd-parent-kind-class > .tsd-kind-icon:before { background-position: -51px -68px; } -.tsd-kind-function.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { background-position: -68px -68px; } -.tsd-kind-function.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { background-position: -85px -68px; } -.tsd-kind-function.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited > .tsd-kind-icon:before { background-position: -102px -68px; } -.tsd-kind-function.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -68px; } -.tsd-kind-function.tsd-parent-kind-enum > .tsd-kind-icon:before { background-position: -170px -68px; } -.tsd-kind-function.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -187px -68px; } -.tsd-kind-function.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -68px; } -.tsd-kind-function.tsd-parent-kind-interface > .tsd-kind-icon:before { background-position: -204px -68px; } -.tsd-kind-function.tsd-parent-kind-interface.tsd-is-inherited > .tsd-kind-icon:before { background-position: -221px -68px; } - -.tsd-kind-method > .tsd-kind-icon:before { background-position: -136px -68px; } -.tsd-kind-method.tsd-is-protected > .tsd-kind-icon:before { background-position: -153px -68px; } -.tsd-kind-method.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -68px; } -.tsd-kind-method.tsd-parent-kind-class > .tsd-kind-icon:before { background-position: -51px -68px; } -.tsd-kind-method.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { background-position: -68px -68px; } -.tsd-kind-method.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { background-position: -85px -68px; } -.tsd-kind-method.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited > .tsd-kind-icon:before { background-position: -102px -68px; } -.tsd-kind-method.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -68px; } -.tsd-kind-method.tsd-parent-kind-enum > .tsd-kind-icon:before { background-position: -170px -68px; } -.tsd-kind-method.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -187px -68px; } -.tsd-kind-method.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { 
background-position: -119px -68px; } -.tsd-kind-method.tsd-parent-kind-interface > .tsd-kind-icon:before { background-position: -204px -68px; } -.tsd-kind-method.tsd-parent-kind-interface.tsd-is-inherited > .tsd-kind-icon:before { background-position: -221px -68px; } - -.tsd-kind-call-signature > .tsd-kind-icon:before { background-position: -136px -68px; } -.tsd-kind-call-signature.tsd-is-protected > .tsd-kind-icon:before { background-position: -153px -68px; } -.tsd-kind-call-signature.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -68px; } -.tsd-kind-call-signature.tsd-parent-kind-class > .tsd-kind-icon:before { background-position: -51px -68px; } -.tsd-kind-call-signature.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { background-position: -68px -68px; } -.tsd-kind-call-signature.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { background-position: -85px -68px; } -.tsd-kind-call-signature.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited > .tsd-kind-icon:before { background-position: -102px -68px; } -.tsd-kind-call-signature.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -68px; } -.tsd-kind-call-signature.tsd-parent-kind-enum > .tsd-kind-icon:before { background-position: -170px -68px; } -.tsd-kind-call-signature.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -187px -68px; } -.tsd-kind-call-signature.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -68px; } -.tsd-kind-call-signature.tsd-parent-kind-interface > .tsd-kind-icon:before { background-position: -204px -68px; } -.tsd-kind-call-signature.tsd-parent-kind-interface.tsd-is-inherited > .tsd-kind-icon:before { background-position: -221px -68px; } - -.tsd-kind-function.tsd-has-type-parameter > .tsd-kind-icon:before { background-position: -136px -85px; } -.tsd-kind-function.tsd-has-type-parameter.tsd-is-protected > .tsd-kind-icon:before { background-position: -153px -85px; } -.tsd-kind-function.tsd-has-type-parameter.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -85px; } -.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-class > .tsd-kind-icon:before { background-position: -51px -85px; } -.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { background-position: -68px -85px; } -.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { background-position: -85px -85px; } -.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited > .tsd-kind-icon:before { background-position: -102px -85px; } -.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -85px; } -.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-enum > .tsd-kind-icon:before { background-position: -170px -85px; } -.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -187px -85px; } -.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -85px; } -.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-interface > .tsd-kind-icon:before { background-position: -204px -85px; } -.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-interface.tsd-is-inherited > .tsd-kind-icon:before { 
background-position: -221px -85px; } - -.tsd-kind-method.tsd-has-type-parameter > .tsd-kind-icon:before { background-position: -136px -85px; } -.tsd-kind-method.tsd-has-type-parameter.tsd-is-protected > .tsd-kind-icon:before { background-position: -153px -85px; } -.tsd-kind-method.tsd-has-type-parameter.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -85px; } -.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-class > .tsd-kind-icon:before { background-position: -51px -85px; } -.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { background-position: -68px -85px; } -.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { background-position: -85px -85px; } -.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited > .tsd-kind-icon:before { background-position: -102px -85px; } -.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -85px; } -.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-enum > .tsd-kind-icon:before { background-position: -170px -85px; } -.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -187px -85px; } -.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -85px; } -.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-interface > .tsd-kind-icon:before { background-position: -204px -85px; } -.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-interface.tsd-is-inherited > .tsd-kind-icon:before { background-position: -221px -85px; } - -.tsd-kind-constructor > .tsd-kind-icon:before { background-position: -136px -102px; } -.tsd-kind-constructor.tsd-is-protected > .tsd-kind-icon:before { background-position: -153px -102px; } -.tsd-kind-constructor.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -102px; } -.tsd-kind-constructor.tsd-parent-kind-class > .tsd-kind-icon:before { background-position: -51px -102px; } -.tsd-kind-constructor.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { background-position: -68px -102px; } -.tsd-kind-constructor.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { background-position: -85px -102px; } -.tsd-kind-constructor.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited > .tsd-kind-icon:before { background-position: -102px -102px; } -.tsd-kind-constructor.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -102px; } -.tsd-kind-constructor.tsd-parent-kind-enum > .tsd-kind-icon:before { background-position: -170px -102px; } -.tsd-kind-constructor.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -187px -102px; } -.tsd-kind-constructor.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -102px; } -.tsd-kind-constructor.tsd-parent-kind-interface > .tsd-kind-icon:before { background-position: -204px -102px; } -.tsd-kind-constructor.tsd-parent-kind-interface.tsd-is-inherited > .tsd-kind-icon:before { background-position: -221px -102px; } - -.tsd-kind-constructor-signature > .tsd-kind-icon:before { background-position: -136px -102px; } -.tsd-kind-constructor-signature.tsd-is-protected > .tsd-kind-icon:before { background-position: -153px -102px; } 
-.tsd-kind-constructor-signature.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -102px; } -.tsd-kind-constructor-signature.tsd-parent-kind-class > .tsd-kind-icon:before { background-position: -51px -102px; } -.tsd-kind-constructor-signature.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { background-position: -68px -102px; } -.tsd-kind-constructor-signature.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { background-position: -85px -102px; } -.tsd-kind-constructor-signature.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited > .tsd-kind-icon:before { background-position: -102px -102px; } -.tsd-kind-constructor-signature.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -102px; } -.tsd-kind-constructor-signature.tsd-parent-kind-enum > .tsd-kind-icon:before { background-position: -170px -102px; } -.tsd-kind-constructor-signature.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -187px -102px; } -.tsd-kind-constructor-signature.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -102px; } -.tsd-kind-constructor-signature.tsd-parent-kind-interface > .tsd-kind-icon:before { background-position: -204px -102px; } -.tsd-kind-constructor-signature.tsd-parent-kind-interface.tsd-is-inherited > .tsd-kind-icon:before { background-position: -221px -102px; } - -.tsd-kind-index-signature > .tsd-kind-icon:before { background-position: -136px -119px; } -.tsd-kind-index-signature.tsd-is-protected > .tsd-kind-icon:before { background-position: -153px -119px; } -.tsd-kind-index-signature.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -119px; } -.tsd-kind-index-signature.tsd-parent-kind-class > .tsd-kind-icon:before { background-position: -51px -119px; } -.tsd-kind-index-signature.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { background-position: -68px -119px; } -.tsd-kind-index-signature.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { background-position: -85px -119px; } -.tsd-kind-index-signature.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited > .tsd-kind-icon:before { background-position: -102px -119px; } -.tsd-kind-index-signature.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -119px; } -.tsd-kind-index-signature.tsd-parent-kind-enum > .tsd-kind-icon:before { background-position: -170px -119px; } -.tsd-kind-index-signature.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -187px -119px; } -.tsd-kind-index-signature.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -119px; } -.tsd-kind-index-signature.tsd-parent-kind-interface > .tsd-kind-icon:before { background-position: -204px -119px; } -.tsd-kind-index-signature.tsd-parent-kind-interface.tsd-is-inherited > .tsd-kind-icon:before { background-position: -221px -119px; } - -.tsd-kind-event > .tsd-kind-icon:before { background-position: -136px -136px; } -.tsd-kind-event.tsd-is-protected > .tsd-kind-icon:before { background-position: -153px -136px; } -.tsd-kind-event.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -136px; } -.tsd-kind-event.tsd-parent-kind-class > .tsd-kind-icon:before { background-position: -51px -136px; } -.tsd-kind-event.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { background-position: -68px -136px; } 
-.tsd-kind-event.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { background-position: -85px -136px; } -.tsd-kind-event.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited > .tsd-kind-icon:before { background-position: -102px -136px; } -.tsd-kind-event.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -136px; } -.tsd-kind-event.tsd-parent-kind-enum > .tsd-kind-icon:before { background-position: -170px -136px; } -.tsd-kind-event.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -187px -136px; } -.tsd-kind-event.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -136px; } -.tsd-kind-event.tsd-parent-kind-interface > .tsd-kind-icon:before { background-position: -204px -136px; } -.tsd-kind-event.tsd-parent-kind-interface.tsd-is-inherited > .tsd-kind-icon:before { background-position: -221px -136px; } - -.tsd-is-static > .tsd-kind-icon:before { background-position: -136px -153px; } -.tsd-is-static.tsd-is-protected > .tsd-kind-icon:before { background-position: -153px -153px; } -.tsd-is-static.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -153px; } -.tsd-is-static.tsd-parent-kind-class > .tsd-kind-icon:before { background-position: -51px -153px; } -.tsd-is-static.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { background-position: -68px -153px; } -.tsd-is-static.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { background-position: -85px -153px; } -.tsd-is-static.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited > .tsd-kind-icon:before { background-position: -102px -153px; } -.tsd-is-static.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -153px; } -.tsd-is-static.tsd-parent-kind-enum > .tsd-kind-icon:before { background-position: -170px -153px; } -.tsd-is-static.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -187px -153px; } -.tsd-is-static.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -153px; } -.tsd-is-static.tsd-parent-kind-interface > .tsd-kind-icon:before { background-position: -204px -153px; } -.tsd-is-static.tsd-parent-kind-interface.tsd-is-inherited > .tsd-kind-icon:before { background-position: -221px -153px; } - -.tsd-is-static.tsd-kind-function > .tsd-kind-icon:before { background-position: -136px -170px; } -.tsd-is-static.tsd-kind-function.tsd-is-protected > .tsd-kind-icon:before { background-position: -153px -170px; } -.tsd-is-static.tsd-kind-function.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -170px; } -.tsd-is-static.tsd-kind-function.tsd-parent-kind-class > .tsd-kind-icon:before { background-position: -51px -170px; } -.tsd-is-static.tsd-kind-function.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { background-position: -68px -170px; } -.tsd-is-static.tsd-kind-function.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { background-position: -85px -170px; } -.tsd-is-static.tsd-kind-function.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited > .tsd-kind-icon:before { background-position: -102px -170px; } -.tsd-is-static.tsd-kind-function.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -170px; } -.tsd-is-static.tsd-kind-function.tsd-parent-kind-enum > .tsd-kind-icon:before { background-position: -170px -170px; } 
-.tsd-is-static.tsd-kind-function.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -187px -170px; } -.tsd-is-static.tsd-kind-function.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -170px; } -.tsd-is-static.tsd-kind-function.tsd-parent-kind-interface > .tsd-kind-icon:before { background-position: -204px -170px; } -.tsd-is-static.tsd-kind-function.tsd-parent-kind-interface.tsd-is-inherited > .tsd-kind-icon:before { background-position: -221px -170px; } - -.tsd-is-static.tsd-kind-method > .tsd-kind-icon:before { background-position: -136px -170px; } -.tsd-is-static.tsd-kind-method.tsd-is-protected > .tsd-kind-icon:before { background-position: -153px -170px; } -.tsd-is-static.tsd-kind-method.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -170px; } -.tsd-is-static.tsd-kind-method.tsd-parent-kind-class > .tsd-kind-icon:before { background-position: -51px -170px; } -.tsd-is-static.tsd-kind-method.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { background-position: -68px -170px; } -.tsd-is-static.tsd-kind-method.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { background-position: -85px -170px; } -.tsd-is-static.tsd-kind-method.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited > .tsd-kind-icon:before { background-position: -102px -170px; } -.tsd-is-static.tsd-kind-method.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -170px; } -.tsd-is-static.tsd-kind-method.tsd-parent-kind-enum > .tsd-kind-icon:before { background-position: -170px -170px; } -.tsd-is-static.tsd-kind-method.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -187px -170px; } -.tsd-is-static.tsd-kind-method.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -170px; } -.tsd-is-static.tsd-kind-method.tsd-parent-kind-interface > .tsd-kind-icon:before { background-position: -204px -170px; } -.tsd-is-static.tsd-kind-method.tsd-parent-kind-interface.tsd-is-inherited > .tsd-kind-icon:before { background-position: -221px -170px; } - -.tsd-is-static.tsd-kind-call-signature > .tsd-kind-icon:before { background-position: -136px -170px; } -.tsd-is-static.tsd-kind-call-signature.tsd-is-protected > .tsd-kind-icon:before { background-position: -153px -170px; } -.tsd-is-static.tsd-kind-call-signature.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -170px; } -.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-class > .tsd-kind-icon:before { background-position: -51px -170px; } -.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { background-position: -68px -170px; } -.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { background-position: -85px -170px; } -.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited > .tsd-kind-icon:before { background-position: -102px -170px; } -.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -170px; } -.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-enum > .tsd-kind-icon:before { background-position: -170px -170px; } -.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -187px -170px; } 
-.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -170px; } -.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-interface > .tsd-kind-icon:before { background-position: -204px -170px; } -.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-interface.tsd-is-inherited > .tsd-kind-icon:before { background-position: -221px -170px; } - -.tsd-is-static.tsd-kind-event > .tsd-kind-icon:before { background-position: -136px -187px; } -.tsd-is-static.tsd-kind-event.tsd-is-protected > .tsd-kind-icon:before { background-position: -153px -187px; } -.tsd-is-static.tsd-kind-event.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -187px; } -.tsd-is-static.tsd-kind-event.tsd-parent-kind-class > .tsd-kind-icon:before { background-position: -51px -187px; } -.tsd-is-static.tsd-kind-event.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { background-position: -68px -187px; } -.tsd-is-static.tsd-kind-event.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { background-position: -85px -187px; } -.tsd-is-static.tsd-kind-event.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited > .tsd-kind-icon:before { background-position: -102px -187px; } -.tsd-is-static.tsd-kind-event.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -187px; } -.tsd-is-static.tsd-kind-event.tsd-parent-kind-enum > .tsd-kind-icon:before { background-position: -170px -187px; } -.tsd-is-static.tsd-kind-event.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { background-position: -187px -187px; } -.tsd-is-static.tsd-kind-event.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { background-position: -119px -187px; } -.tsd-is-static.tsd-kind-event.tsd-parent-kind-interface > .tsd-kind-icon:before { background-position: -204px -187px; } -.tsd-is-static.tsd-kind-event.tsd-parent-kind-interface.tsd-is-inherited > .tsd-kind-icon:before { background-position: -221px -187px; } - -.no-transition { transition: none !important; } - -@-webkit-keyframes fade-in { from { opacity: 0; } - to { opacity: 1; } } - -@keyframes fade-in { from { opacity: 0; } - to { opacity: 1; } } -@-webkit-keyframes fade-out { from { opacity: 1; visibility: visible; } - to { opacity: 0; } } -@keyframes fade-out { from { opacity: 1; visibility: visible; } - to { opacity: 0; } } -@-webkit-keyframes fade-in-delayed { 0% { opacity: 0; } - 33% { opacity: 0; } - 100% { opacity: 1; } } -@keyframes fade-in-delayed { 0% { opacity: 0; } - 33% { opacity: 0; } - 100% { opacity: 1; } } -@-webkit-keyframes fade-out-delayed { 0% { opacity: 1; visibility: visible; } - 66% { opacity: 0; } - 100% { opacity: 0; } } -@keyframes fade-out-delayed { 0% { opacity: 1; visibility: visible; } - 66% { opacity: 0; } - 100% { opacity: 0; } } -@-webkit-keyframes shift-to-left { from { -webkit-transform: translate(0, 0); transform: translate(0, 0); } - to { -webkit-transform: translate(-25%, 0); transform: translate(-25%, 0); } } -@keyframes shift-to-left { from { -webkit-transform: translate(0, 0); transform: translate(0, 0); } - to { -webkit-transform: translate(-25%, 0); transform: translate(-25%, 0); } } -@-webkit-keyframes unshift-to-left { from { -webkit-transform: translate(-25%, 0); transform: translate(-25%, 0); } - to { -webkit-transform: translate(0, 0); transform: translate(0, 0); } } -@keyframes unshift-to-left { from { -webkit-transform: translate(-25%, 0); transform: translate(-25%, 0); } - to { 
-webkit-transform: translate(0, 0); transform: translate(0, 0); } } -@-webkit-keyframes pop-in-from-right { from { -webkit-transform: translate(100%, 0); transform: translate(100%, 0); } - to { -webkit-transform: translate(0, 0); transform: translate(0, 0); } } -@keyframes pop-in-from-right { from { -webkit-transform: translate(100%, 0); transform: translate(100%, 0); } - to { -webkit-transform: translate(0, 0); transform: translate(0, 0); } } -@-webkit-keyframes pop-out-to-right { from { -webkit-transform: translate(0, 0); transform: translate(0, 0); visibility: visible; } - to { -webkit-transform: translate(100%, 0); transform: translate(100%, 0); } } -@keyframes pop-out-to-right { from { -webkit-transform: translate(0, 0); transform: translate(0, 0); visibility: visible; } - to { -webkit-transform: translate(100%, 0); transform: translate(100%, 0); } } -body { background: #fdfdfd; font-family: "Segoe UI", sans-serif; font-size: 16px; color: #222; } - -a { color: #4da6ff; text-decoration: none; } -a:hover { text-decoration: underline; } - -code, pre { font-family: Menlo, Monaco, Consolas, "Courier New", monospace; padding: 0.2em; margin: 0; font-size: 14px; background-color: rgba(0, 0, 0, 0.04); } - -pre { padding: 10px; } -pre code { padding: 0; font-size: 100%; background-color: transparent; } - -.tsd-typography { line-height: 1.333em; } -.tsd-typography ul { list-style: square; padding: 0 0 0 20px; margin: 0; } -.tsd-typography h4, .tsd-typography .tsd-index-panel h3, .tsd-index-panel .tsd-typography h3, .tsd-typography h5, .tsd-typography h6 { font-size: 1em; margin: 0; } -.tsd-typography h5, .tsd-typography h6 { font-weight: normal; } -.tsd-typography p, .tsd-typography ul, .tsd-typography ol { margin: 1em 0; } - -@media (min-width: 901px) and (max-width: 1024px) { html.default .col-content { width: 72%; } - html.default .col-menu { width: 28%; } - html.default .tsd-navigation { padding-left: 10px; } } -@media (max-width: 900px) { html.default .col-content { float: none; width: 100%; } - html.default .col-menu { position: fixed !important; overflow: auto; -webkit-overflow-scrolling: touch; overflow-scrolling: touch; z-index: 1024; top: 0 !important; bottom: 0 !important; left: auto !important; right: 0 !important; width: 100%; padding: 20px 20px 0 0; max-width: 450px; visibility: hidden; background-color: #fff; -webkit-transform: translate(100%, 0); transform: translate(100%, 0); } - html.default .col-menu > *:last-child { padding-bottom: 20px; } - html.default .overlay { content: ""; display: block; position: fixed; z-index: 1023; top: 0; left: 0; right: 0; bottom: 0; background-color: rgba(0, 0, 0, 0.75); visibility: hidden; } - html.default.to-has-menu .overlay { -webkit-animation: fade-in 0.4s; animation: fade-in 0.4s; } - html.default.to-has-menu header, html.default.to-has-menu footer, html.default.to-has-menu .col-content { -webkit-animation: shift-to-left 0.4s; animation: shift-to-left 0.4s; } - html.default.to-has-menu .col-menu { -webkit-animation: pop-in-from-right 0.4s; animation: pop-in-from-right 0.4s; } - html.default.from-has-menu .overlay { -webkit-animation: fade-out 0.4s; animation: fade-out 0.4s; } - html.default.from-has-menu header, html.default.from-has-menu footer, html.default.from-has-menu .col-content { -webkit-animation: unshift-to-left 0.4s; animation: unshift-to-left 0.4s; } - html.default.from-has-menu .col-menu { -webkit-animation: pop-out-to-right 0.4s; animation: pop-out-to-right 0.4s; } - html.default.has-menu body { overflow: hidden; } - 
html.default.has-menu .overlay { visibility: visible; } - html.default.has-menu header, html.default.has-menu footer, html.default.has-menu .col-content { -webkit-transform: translate(-25%, 0); transform: translate(-25%, 0); } - html.default.has-menu .col-menu { visibility: visible; -webkit-transform: translate(0, 0); transform: translate(0, 0); } } - -.tsd-page-title { padding: 70px 0 20px 0; margin: 0 0 40px 0; background: #fff; box-shadow: 0 0 5px rgba(0, 0, 0, 0.35); } -.tsd-page-title h1 { margin: 0; } - -.tsd-breadcrumb { margin: 0; padding: 0; color: #808080; } -.tsd-breadcrumb a { color: #808080; text-decoration: none; } -.tsd-breadcrumb a:hover { text-decoration: underline; } -.tsd-breadcrumb li { display: inline; } -.tsd-breadcrumb li:after { content: " / "; } - -html.minimal .container { margin: 0; } -html.minimal .container-main { padding-top: 50px; padding-bottom: 0; } -html.minimal .content-wrap { padding-left: 300px; } -html.minimal .tsd-navigation { position: fixed !important; overflow: auto; -webkit-overflow-scrolling: touch; overflow-scrolling: touch; box-sizing: border-box; z-index: 1; left: 0; top: 40px; bottom: 0; width: 300px; padding: 20px; margin: 0; } -html.minimal .tsd-member .tsd-member { margin-left: 0; } -html.minimal .tsd-page-toolbar { position: fixed; z-index: 2; } -html.minimal #tsd-filter .tsd-filter-group { right: 0; -webkit-transform: none; transform: none; } -html.minimal footer { background-color: transparent; } -html.minimal footer .container { padding: 0; } -html.minimal .tsd-generator { padding: 0; } -@media (max-width: 900px) { html.minimal .tsd-navigation { display: none; } - html.minimal .content-wrap { padding-left: 0; } } - -dl.tsd-comment-tags { overflow: hidden; } -dl.tsd-comment-tags dt { clear: both; float: left; padding: 1px 5px; margin: 0 10px 0 0; border-radius: 4px; border: 1px solid #808080; color: #808080; font-size: 0.8em; font-weight: normal; } -dl.tsd-comment-tags dd { margin: 0 0 10px 0; } -dl.tsd-comment-tags p { margin: 0; } - -.tsd-panel.tsd-comment .lead { font-size: 1.1em; line-height: 1.333em; margin-bottom: 2em; } -.tsd-panel.tsd-comment .lead:last-child { margin-bottom: 0; } - -.toggle-protected .tsd-is-private { display: none; } - -.toggle-public .tsd-is-private, .toggle-public .tsd-is-protected, .toggle-public .tsd-is-private-protected { display: none; } - -.toggle-inherited .tsd-is-inherited { display: none; } - -.toggle-only-exported .tsd-is-not-exported { display: none; } - -.toggle-externals .tsd-is-external { display: none; } - -#tsd-filter { position: relative; display: inline-block; height: 40px; vertical-align: bottom; } -.no-filter #tsd-filter { display: none; } -#tsd-filter .tsd-filter-group { display: inline-block; height: 40px; vertical-align: bottom; white-space: nowrap; } -#tsd-filter input { display: none; } -@media (max-width: 900px) { #tsd-filter .tsd-filter-group { display: block; position: absolute; top: 40px; right: 20px; height: auto; background-color: #fff; visibility: hidden; -webkit-transform: translate(50%, 0); transform: translate(50%, 0); box-shadow: 0 0 4px rgba(0, 0, 0, 0.25); } - .has-options #tsd-filter .tsd-filter-group { visibility: visible; } - .to-has-options #tsd-filter .tsd-filter-group { -webkit-animation: fade-in 0.2s; animation: fade-in 0.2s; } - .from-has-options #tsd-filter .tsd-filter-group { -webkit-animation: fade-out 0.2s; animation: fade-out 0.2s; } - #tsd-filter label, #tsd-filter .tsd-select { display: block; padding-right: 20px; } } - -footer { border-top: 1px solid #eee; 
background-color: #fff; } -footer.with-border-bottom { border-bottom: 1px solid #eee; } -footer .tsd-legend-group { font-size: 0; } -footer .tsd-legend { display: inline-block; width: 25%; padding: 0; font-size: 16px; list-style: none; line-height: 1.333em; vertical-align: top; } -@media (max-width: 900px) { footer .tsd-legend { width: 50%; } } - -.tsd-hierarchy { list-style: square; padding: 0 0 0 20px; margin: 0; } -.tsd-hierarchy .target { font-weight: bold; } - -.tsd-index-panel .tsd-index-content { margin-bottom: -30px !important; } -.tsd-index-panel .tsd-index-section { margin-bottom: 30px !important; } -.tsd-index-panel h3 { margin: 0 -20px 10px -20px; padding: 0 20px 10px 20px; border-bottom: 1px solid #eee; } -.tsd-index-panel ul.tsd-index-list { -webkit-column-count: 3; -moz-column-count: 3; -ms-column-count: 3; -o-column-count: 3; column-count: 3; -webkit-column-gap: 20px; -moz-column-gap: 20px; -ms-column-gap: 20px; -o-column-gap: 20px; column-gap: 20px; padding: 0; list-style: none; line-height: 1.333em; } -@media (max-width: 900px) { .tsd-index-panel ul.tsd-index-list { -webkit-column-count: 1; -moz-column-count: 1; -ms-column-count: 1; -o-column-count: 1; column-count: 1; } } -@media (min-width: 901px) and (max-width: 1024px) { .tsd-index-panel ul.tsd-index-list { -webkit-column-count: 2; -moz-column-count: 2; -ms-column-count: 2; -o-column-count: 2; column-count: 2; } } -.tsd-index-panel ul.tsd-index-list li { -webkit-column-break-inside: avoid; -moz-column-break-inside: avoid; -ms-column-break-inside: avoid; -o-column-break-inside: avoid; column-break-inside: avoid; -webkit-page-break-inside: avoid; -moz-page-break-inside: avoid; -ms-page-break-inside: avoid; -o-page-break-inside: avoid; page-break-inside: avoid; } -.tsd-index-panel a, .tsd-index-panel .tsd-parent-kind-module a { color: #9600ff; } -.tsd-index-panel .tsd-parent-kind-interface a { color: #7da01f; } -.tsd-index-panel .tsd-parent-kind-enum a { color: #cc9900; } -.tsd-index-panel .tsd-parent-kind-class a { color: #4da6ff; } -.tsd-index-panel .tsd-kind-module a { color: #9600ff; } -.tsd-index-panel .tsd-kind-interface a { color: #7da01f; } -.tsd-index-panel .tsd-kind-enum a { color: #cc9900; } -.tsd-index-panel .tsd-kind-class a { color: #4da6ff; } -.tsd-index-panel .tsd-is-private a { color: #808080; } - -.tsd-flag { display: inline-block; padding: 1px 5px; border-radius: 4px; color: #fff; background-color: #808080; text-indent: 0; font-size: 14px; font-weight: normal; } - -.tsd-anchor { position: absolute; top: -100px; } - -.tsd-member { position: relative; } -.tsd-member .tsd-anchor + h3 { margin-top: 0; margin-bottom: 0; border-bottom: none; } - -.tsd-navigation { padding: 0 0 0 40px; } -.tsd-navigation a { display: block; padding-top: 2px; padding-bottom: 2px; border-left: 2px solid transparent; color: #222; text-decoration: none; transition: border-left-color 0.1s; } -.tsd-navigation a:hover { text-decoration: underline; } -.tsd-navigation ul { margin: 0; padding: 0; list-style: none; } -.tsd-navigation li { padding: 0; } - -.tsd-navigation.primary { padding-bottom: 40px; } -.tsd-navigation.primary a { display: block; padding-top: 6px; padding-bottom: 6px; } -.tsd-navigation.primary ul li a { padding-left: 5px; } -.tsd-navigation.primary ul li li a { padding-left: 25px; } -.tsd-navigation.primary ul li li li a { padding-left: 45px; } -.tsd-navigation.primary ul li li li li a { padding-left: 65px; } -.tsd-navigation.primary ul li li li li li a { padding-left: 85px; } -.tsd-navigation.primary ul li li li li 
li li a { padding-left: 105px; } -.tsd-navigation.primary > ul { border-bottom: 1px solid #eee; } -.tsd-navigation.primary li { border-top: 1px solid #eee; } -.tsd-navigation.primary li.current > a { font-weight: bold; } -.tsd-navigation.primary li.label span { display: block; padding: 20px 0 6px 5px; color: #808080; } -.tsd-navigation.primary li.globals + li > span, .tsd-navigation.primary li.globals + li > a { padding-top: 20px; } - -.tsd-navigation.secondary ul { transition: opacity 0.2s; } -.tsd-navigation.secondary ul li a { padding-left: 25px; } -.tsd-navigation.secondary ul li li a { padding-left: 45px; } -.tsd-navigation.secondary ul li li li a { padding-left: 65px; } -.tsd-navigation.secondary ul li li li li a { padding-left: 85px; } -.tsd-navigation.secondary ul li li li li li a { padding-left: 105px; } -.tsd-navigation.secondary ul li li li li li li a { padding-left: 125px; } -.tsd-navigation.secondary ul.current a { border-left-color: #eee; } -.tsd-navigation.secondary li.focus > a, .tsd-navigation.secondary ul.current li.focus > a { border-left-color: #000; } -.tsd-navigation.secondary li.current { margin-top: 20px; margin-bottom: 20px; border-left-color: #eee; } -.tsd-navigation.secondary li.current > a { font-weight: bold; } - -@media (min-width: 901px) { .menu-sticky-wrap { position: static; } - .no-csspositionsticky .menu-sticky-wrap.sticky { position: fixed; } - .no-csspositionsticky .menu-sticky-wrap.sticky-current { position: fixed; } - .no-csspositionsticky .menu-sticky-wrap.sticky-current ul.before-current, .no-csspositionsticky .menu-sticky-wrap.sticky-current ul.after-current { opacity: 0; } - .no-csspositionsticky .menu-sticky-wrap.sticky-bottom { position: absolute; top: auto !important; left: auto !important; bottom: 0; right: 0; } - .csspositionsticky .menu-sticky-wrap.sticky { position: -webkit-sticky; position: sticky; } - .csspositionsticky .menu-sticky-wrap.sticky-current { position: -webkit-sticky; position: sticky; } } - -.tsd-panel { margin: 20px 0; padding: 20px; background-color: #fff; box-shadow: 0 0 4px rgba(0, 0, 0, 0.25); } -.tsd-panel:empty { display: none; } -.tsd-panel > h1, .tsd-panel > h2, .tsd-panel > h3 { margin: 1.5em -20px 10px -20px; padding: 0 20px 10px 20px; border-bottom: 1px solid #eee; } -.tsd-panel > h1.tsd-before-signature, .tsd-panel > h2.tsd-before-signature, .tsd-panel > h3.tsd-before-signature { margin-bottom: 0; border-bottom: 0; } -.tsd-panel table { display: block; width: 100%; overflow: auto; margin-top: 10px; word-break: normal; word-break: keep-all; } -.tsd-panel table th { font-weight: bold; } -.tsd-panel table th, .tsd-panel table td { padding: 6px 13px; border: 1px solid #ddd; } -.tsd-panel table tr { background-color: #fff; border-top: 1px solid #ccc; } -.tsd-panel table tr:nth-child(2n) { background-color: #f8f8f8; } - -.tsd-panel-group { margin: 60px 0; } -.tsd-panel-group > h1, .tsd-panel-group > h2, .tsd-panel-group > h3 { padding-left: 20px; padding-right: 20px; } - -#tsd-search { transition: background-color 0.2s; } -#tsd-search .title { position: relative; z-index: 2; } -#tsd-search .field { position: absolute; left: 0; top: 0; right: 40px; height: 40px; } -#tsd-search .field input { box-sizing: border-box; position: relative; top: -50px; z-index: 1; width: 100%; padding: 0 10px; opacity: 0; outline: 0; border: 0; background: transparent; color: #222; } -#tsd-search .field label { position: absolute; overflow: hidden; right: -40px; } -#tsd-search .field input, #tsd-search .title { transition: opacity 0.2s; } 
-#tsd-search .results { position: absolute; visibility: hidden; top: 40px; width: 100%; margin: 0; padding: 0; list-style: none; box-shadow: 0 0 4px rgba(0, 0, 0, 0.25); } -#tsd-search .results li { padding: 0 10px; background-color: #fdfdfd; } -#tsd-search .results li:nth-child(even) { background-color: #fff; } -#tsd-search .results li.state { display: none; } -#tsd-search .results li.current, #tsd-search .results li:hover { background-color: #eee; } -#tsd-search .results a { display: block; } -#tsd-search .results a:before { top: 10px; } -#tsd-search .results span.parent { color: #808080; font-weight: normal; } -#tsd-search.has-focus { background-color: #eee; } -#tsd-search.has-focus .field input { top: 0; opacity: 1; } -#tsd-search.has-focus .title { z-index: 0; opacity: 0; } -#tsd-search.has-focus .results { visibility: visible; } -#tsd-search.loading .results li.state.loading { display: block; } -#tsd-search.failure .results li.state.failure { display: block; } - -.tsd-signature { margin: 0 0 1em 0; padding: 10px; border: 1px solid #eee; font-family: Menlo, Monaco, Consolas, "Courier New", monospace; font-size: 14px; } -.tsd-signature.tsd-kind-icon { padding-left: 30px; } -.tsd-signature.tsd-kind-icon:before { top: 10px; left: 10px; } -.tsd-panel > .tsd-signature { margin-left: -20px; margin-right: -20px; border-width: 1px 0; } -.tsd-panel > .tsd-signature.tsd-kind-icon { padding-left: 40px; } -.tsd-panel > .tsd-signature.tsd-kind-icon:before { left: 20px; } - -.tsd-signature-symbol { color: #808080; font-weight: normal; } - -.tsd-signature-type { font-style: italic; font-weight: normal; } - -.tsd-signatures { padding: 0; margin: 0 0 1em 0; border: 1px solid #eee; } -.tsd-signatures .tsd-signature { margin: 0; border-width: 1px 0 0 0; transition: background-color 0.1s; } -.tsd-signatures .tsd-signature:first-child { border-top-width: 0; } -.tsd-signatures .tsd-signature.current { background-color: #eee; } -.tsd-signatures.active > .tsd-signature { cursor: pointer; } -.tsd-panel > .tsd-signatures { margin-left: -20px; margin-right: -20px; border-width: 1px 0; } -.tsd-panel > .tsd-signatures .tsd-signature.tsd-kind-icon { padding-left: 40px; } -.tsd-panel > .tsd-signatures .tsd-signature.tsd-kind-icon:before { left: 20px; } -.tsd-panel > a.anchor + .tsd-signatures { border-top-width: 0; margin-top: -20px; } - -ul.tsd-descriptions { position: relative; overflow: hidden; transition: height 0.3s; padding: 0; list-style: none; } -ul.tsd-descriptions.active > .tsd-description { display: none; } -ul.tsd-descriptions.active > .tsd-description.current { display: block; } -ul.tsd-descriptions.active > .tsd-description.fade-in { -webkit-animation: fade-in-delayed 0.3s; animation: fade-in-delayed 0.3s; } -ul.tsd-descriptions.active > .tsd-description.fade-out { -webkit-animation: fade-out-delayed 0.3s; animation: fade-out-delayed 0.3s; position: absolute; display: block; top: 0; left: 0; right: 0; opacity: 0; visibility: hidden; } -ul.tsd-descriptions h4, ul.tsd-descriptions .tsd-index-panel h3, .tsd-index-panel ul.tsd-descriptions h3 { font-size: 16px; margin: 1em 0 0.5em 0; } - -ul.tsd-parameters, ul.tsd-type-parameters { list-style: square; margin: 0; padding-left: 20px; } -ul.tsd-parameters > li.tsd-parameter-siganture, ul.tsd-type-parameters > li.tsd-parameter-siganture { list-style: none; margin-left: -20px; } -ul.tsd-parameters h5, ul.tsd-type-parameters h5 { font-size: 16px; margin: 1em 0 0.5em 0; } -ul.tsd-parameters .tsd-comment, ul.tsd-type-parameters .tsd-comment { margin-top: -0.5em; 
} - -.tsd-sources { font-size: 14px; color: #808080; margin: 0 0 1em 0; } -.tsd-sources a { color: #808080; text-decoration: underline; } -.tsd-sources ul, .tsd-sources p { margin: 0 !important; } -.tsd-sources ul { list-style: none; padding: 0; } - -.tsd-page-toolbar { position: absolute; z-index: 1; top: 0; left: 0; width: 100%; height: 40px; color: #333; background: #fff; border-bottom: 1px solid #eee; } -.tsd-page-toolbar a { color: #333; text-decoration: none; } -.tsd-page-toolbar a.title { font-weight: bold; } -.tsd-page-toolbar a.title:hover { text-decoration: underline; } -.tsd-page-toolbar .table-wrap { display: table; width: 100%; height: 40px; } -.tsd-page-toolbar .table-cell { display: table-cell; position: relative; white-space: nowrap; line-height: 40px; } -.tsd-page-toolbar .table-cell:first-child { width: 100%; } - -.tsd-widget:before, .tsd-select .tsd-select-label:before, .tsd-select .tsd-select-list li:before { content: ""; display: inline-block; width: 40px; height: 40px; margin: 0 -8px 0 0; background-image: url(../images/widgets.png); background-repeat: no-repeat; text-indent: -1024px; vertical-align: bottom; } -@media (-webkit-min-device-pixel-ratio: 1.5), (min-device-pixel-ratio: 1.5), (min-resolution: 144dpi) { .tsd-widget:before, .tsd-select .tsd-select-label:before, .tsd-select .tsd-select-list li:before { background-image: url(../images/widgets@2x.png); background-size: 320px 40px; } } - -.tsd-widget { display: inline-block; overflow: hidden; opacity: 0.6; height: 40px; transition: opacity 0.1s, background-color 0.2s; vertical-align: bottom; cursor: pointer; } -.tsd-widget:hover { opacity: 0.8; } -.tsd-widget.active { opacity: 1; background-color: #eee; } -.tsd-widget.no-caption { width: 40px; } -.tsd-widget.no-caption:before { margin: 0; } -.tsd-widget.search:before { background-position: 0 0; } -.tsd-widget.menu:before { background-position: -40px 0; } -.tsd-widget.options:before { background-position: -80px 0; } -.tsd-widget.options, .tsd-widget.menu { display: none; } -@media (max-width: 900px) { .tsd-widget.options, .tsd-widget.menu { display: inline-block; } } -input[type=checkbox] + .tsd-widget:before { background-position: -120px 0; } -input[type=checkbox]:checked + .tsd-widget:before { background-position: -160px 0; } - -.tsd-select { position: relative; display: inline-block; height: 40px; transition: opacity 0.1s, background-color 0.2s; vertical-align: bottom; cursor: pointer; } -.tsd-select .tsd-select-label { opacity: 0.6; transition: opacity 0.2s; } -.tsd-select .tsd-select-label:before { background-position: -240px 0; } -.tsd-select.active .tsd-select-label { opacity: 0.8; } -.tsd-select.active .tsd-select-list { visibility: visible; opacity: 1; transition-delay: 0s; } -.tsd-select .tsd-select-list { position: absolute; visibility: hidden; top: 40px; left: 0; margin: 0; padding: 0; opacity: 0; list-style: none; box-shadow: 0 0 4px rgba(0, 0, 0, 0.25); transition: visibility 0s 0.2s, opacity 0.2s; } -.tsd-select .tsd-select-list li { padding: 0 20px 0 0; background-color: #fdfdfd; } -.tsd-select .tsd-select-list li:before { background-position: 40px 0; } -.tsd-select .tsd-select-list li:nth-child(even) { background-color: #fff; } -.tsd-select .tsd-select-list li:hover { background-color: #eee; } -.tsd-select .tsd-select-list li.selected:before { background-position: -200px 0; } -@media (max-width: 900px) { .tsd-select .tsd-select-list { top: 0; left: auto; right: 100%; margin-right: -5px; } - .tsd-select .tsd-select-label:before { 
background-position: -280px 0; } } - -img { max-width: 100%; } diff --git a/docs/assets/css/main.css.map b/docs/assets/css/main.css.map deleted file mode 100644 index bc17fe48..00000000 --- a/docs/assets/css/main.css.map +++ /dev/null @@ -1,7 +0,0 @@ -{ -"version": 3, -"mappings": "…", -"sources": ["../../../../src/default/assets/css/vendors/_normalize.sass","../../../../src/default/assets/css/vendors/_highlight.js.sass","../../../../src/default/assets/css/setup/_mixins.sass","../../../../src/default/assets/css/setup/_grid.sass","../../../../src/default/assets/css/setup/_icons.scss","../../../../src/default/assets/css/setup/_animations.sass","../../../../src/default/assets/css/setup/_typography.sass","../../../../src/default/assets/css/_constants.sass","../../../../src/default/assets/css/layouts/_default.sass","../../../../src/default/assets/css/layouts/_minimal.sass","../../../../src/default/assets/css/elements/_comment.sass","../../../../src/default/assets/css/elements/_filter.sass","../../../../src/default/assets/css/elements/_footer.sass","../../../../src/default/assets/css/elements/_hierarchy.sass","../../../../src/default/assets/css/elements/_index.sass","../../../../src/default/assets/css/elements/_member.sass","../../../../src/default/assets/css/elements/_navigation.sass","../../../../src/default/assets/css/elements/_panel.sass","../../../../src/default/assets/css/elements/_search.sass","../../../../src/default/assets/css/elements/_signatures.sass","../../../../src/default/assets/css/elements/_sources.sass","../../../../src/default/assets/css/elements/_toolbar.sass","../../../../src/default/assets/css/elements/_images.sass"], -"names": [], -"file": "main.css" -}
diff --git a/docs/assets/highlight.css b/docs/assets/highlight.css new file mode 100644 index 00000000..d0be61b3 --- /dev/null +++ b/docs/assets/highlight.css @@ -0,0 +1,78 @@ +:root { + --light-hl-0: #AF00DB; + --dark-hl-0: #C586C0; + --light-hl-1: #000000; + --dark-hl-1: #D4D4D4; + --light-hl-2: #001080; + --dark-hl-2: #9CDCFE; + --light-hl-3: #A31515; + --dark-hl-3: #CE9178; + --light-hl-4: #0000FF; + --dark-hl-4: #569CD6; + --light-hl-5: #0070C1; + --dark-hl-5: #4FC1FF; + --light-hl-6: #795E26; + --dark-hl-6: #DCDCAA; + --light-hl-7: #008000; + --dark-hl-7: #6A9955; + --light-code-background: #FFFFFF; + --dark-code-background: #1E1E1E; +} + +@media (prefers-color-scheme: light) { :root { + --hl-0: var(--light-hl-0); + --hl-1: var(--light-hl-1); + --hl-2: var(--light-hl-2); + --hl-3: var(--light-hl-3); + --hl-4: var(--light-hl-4); + --hl-5: var(--light-hl-5); + --hl-6: var(--light-hl-6); + --hl-7: 
var(--light-hl-7); + --code-background: var(--light-code-background); +} } + +@media (prefers-color-scheme: dark) { :root { + --hl-0: var(--dark-hl-0); + --hl-1: var(--dark-hl-1); + --hl-2: var(--dark-hl-2); + --hl-3: var(--dark-hl-3); + --hl-4: var(--dark-hl-4); + --hl-5: var(--dark-hl-5); + --hl-6: var(--dark-hl-6); + --hl-7: var(--dark-hl-7); + --code-background: var(--dark-code-background); +} } + +body.light { + --hl-0: var(--light-hl-0); + --hl-1: var(--light-hl-1); + --hl-2: var(--light-hl-2); + --hl-3: var(--light-hl-3); + --hl-4: var(--light-hl-4); + --hl-5: var(--light-hl-5); + --hl-6: var(--light-hl-6); + --hl-7: var(--light-hl-7); + --code-background: var(--light-code-background); +} + +body.dark { + --hl-0: var(--dark-hl-0); + --hl-1: var(--dark-hl-1); + --hl-2: var(--dark-hl-2); + --hl-3: var(--dark-hl-3); + --hl-4: var(--dark-hl-4); + --hl-5: var(--dark-hl-5); + --hl-6: var(--dark-hl-6); + --hl-7: var(--dark-hl-7); + --code-background: var(--dark-code-background); +} + +.hl-0 { color: var(--hl-0); } +.hl-1 { color: var(--hl-1); } +.hl-2 { color: var(--hl-2); } +.hl-3 { color: var(--hl-3); } +.hl-4 { color: var(--hl-4); } +.hl-5 { color: var(--hl-5); } +.hl-6 { color: var(--hl-6); } +.hl-7 { color: var(--hl-7); } +pre, code { background: var(--code-background); } diff --git a/docs/assets/icons.css b/docs/assets/icons.css new file mode 100644 index 00000000..776a3562 --- /dev/null +++ b/docs/assets/icons.css @@ -0,0 +1,1043 @@ +.tsd-kind-icon { + display: block; + position: relative; + padding-left: 20px; + text-indent: -20px; +} +.tsd-kind-icon:before { + content: ""; + display: inline-block; + vertical-align: middle; + width: 17px; + height: 17px; + margin: 0 3px 2px 0; + background-image: url(./icons.png); +} +@media (-webkit-min-device-pixel-ratio: 1.5), (min-resolution: 144dpi) { + .tsd-kind-icon:before { + background-image: url(./icons@2x.png); + background-size: 238px 204px; + } +} + +.tsd-signature.tsd-kind-icon:before { + background-position: 0 -153px; +} + +.tsd-kind-object-literal > .tsd-kind-icon:before { + background-position: 0px -17px; +} +.tsd-kind-object-literal.tsd-is-protected > .tsd-kind-icon:before { + background-position: -17px -17px; +} +.tsd-kind-object-literal.tsd-is-private > .tsd-kind-icon:before { + background-position: -34px -17px; +} + +.tsd-kind-class > .tsd-kind-icon:before { + background-position: 0px -34px; +} +.tsd-kind-class.tsd-is-protected > .tsd-kind-icon:before { + background-position: -17px -34px; +} +.tsd-kind-class.tsd-is-private > .tsd-kind-icon:before { + background-position: -34px -34px; +} + +.tsd-kind-class.tsd-has-type-parameter > .tsd-kind-icon:before { + background-position: 0px -51px; +} +.tsd-kind-class.tsd-has-type-parameter.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -17px -51px; +} +.tsd-kind-class.tsd-has-type-parameter.tsd-is-private > .tsd-kind-icon:before { + background-position: -34px -51px; +} + +.tsd-kind-interface > .tsd-kind-icon:before { + background-position: 0px -68px; +} +.tsd-kind-interface.tsd-is-protected > .tsd-kind-icon:before { + background-position: -17px -68px; +} +.tsd-kind-interface.tsd-is-private > .tsd-kind-icon:before { + background-position: -34px -68px; +} + +.tsd-kind-interface.tsd-has-type-parameter > .tsd-kind-icon:before { + background-position: 0px -85px; +} +.tsd-kind-interface.tsd-has-type-parameter.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -17px -85px; +} +.tsd-kind-interface.tsd-has-type-parameter.tsd-is-private + > 
.tsd-kind-icon:before { + background-position: -34px -85px; +} + +.tsd-kind-namespace > .tsd-kind-icon:before { + background-position: 0px -102px; +} +.tsd-kind-namespace.tsd-is-protected > .tsd-kind-icon:before { + background-position: -17px -102px; +} +.tsd-kind-namespace.tsd-is-private > .tsd-kind-icon:before { + background-position: -34px -102px; +} + +.tsd-kind-module > .tsd-kind-icon:before { + background-position: 0px -102px; +} +.tsd-kind-module.tsd-is-protected > .tsd-kind-icon:before { + background-position: -17px -102px; +} +.tsd-kind-module.tsd-is-private > .tsd-kind-icon:before { + background-position: -34px -102px; +} + +.tsd-kind-enum > .tsd-kind-icon:before { + background-position: 0px -119px; +} +.tsd-kind-enum.tsd-is-protected > .tsd-kind-icon:before { + background-position: -17px -119px; +} +.tsd-kind-enum.tsd-is-private > .tsd-kind-icon:before { + background-position: -34px -119px; +} + +.tsd-kind-enum-member > .tsd-kind-icon:before { + background-position: 0px -136px; +} +.tsd-kind-enum-member.tsd-is-protected > .tsd-kind-icon:before { + background-position: -17px -136px; +} +.tsd-kind-enum-member.tsd-is-private > .tsd-kind-icon:before { + background-position: -34px -136px; +} + +.tsd-kind-signature > .tsd-kind-icon:before { + background-position: 0px -153px; +} +.tsd-kind-signature.tsd-is-protected > .tsd-kind-icon:before { + background-position: -17px -153px; +} +.tsd-kind-signature.tsd-is-private > .tsd-kind-icon:before { + background-position: -34px -153px; +} + +.tsd-kind-type-alias > .tsd-kind-icon:before { + background-position: 0px -170px; +} +.tsd-kind-type-alias.tsd-is-protected > .tsd-kind-icon:before { + background-position: -17px -170px; +} +.tsd-kind-type-alias.tsd-is-private > .tsd-kind-icon:before { + background-position: -34px -170px; +} + +.tsd-kind-type-alias.tsd-has-type-parameter > .tsd-kind-icon:before { + background-position: 0px -187px; +} +.tsd-kind-type-alias.tsd-has-type-parameter.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -17px -187px; +} +.tsd-kind-type-alias.tsd-has-type-parameter.tsd-is-private + > .tsd-kind-icon:before { + background-position: -34px -187px; +} + +.tsd-kind-variable > .tsd-kind-icon:before { + background-position: -136px -0px; +} +.tsd-kind-variable.tsd-is-protected > .tsd-kind-icon:before { + background-position: -153px -0px; +} +.tsd-kind-variable.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -0px; +} +.tsd-kind-variable.tsd-parent-kind-class > .tsd-kind-icon:before { + background-position: -51px -0px; +} +.tsd-kind-variable.tsd-parent-kind-class.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -68px -0px; +} +.tsd-kind-variable.tsd-parent-kind-class.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -85px -0px; +} +.tsd-kind-variable.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -102px -0px; +} +.tsd-kind-variable.tsd-parent-kind-class.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -0px; +} +.tsd-kind-variable.tsd-parent-kind-enum > .tsd-kind-icon:before { + background-position: -170px -0px; +} +.tsd-kind-variable.tsd-parent-kind-enum.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -187px -0px; +} +.tsd-kind-variable.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -0px; +} +.tsd-kind-variable.tsd-parent-kind-interface > .tsd-kind-icon:before { + background-position: -204px -0px; +} 
+.tsd-kind-variable.tsd-parent-kind-interface.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -221px -0px; +} + +.tsd-kind-property > .tsd-kind-icon:before { + background-position: -136px -0px; +} +.tsd-kind-property.tsd-is-protected > .tsd-kind-icon:before { + background-position: -153px -0px; +} +.tsd-kind-property.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -0px; +} +.tsd-kind-property.tsd-parent-kind-class > .tsd-kind-icon:before { + background-position: -51px -0px; +} +.tsd-kind-property.tsd-parent-kind-class.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -68px -0px; +} +.tsd-kind-property.tsd-parent-kind-class.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -85px -0px; +} +.tsd-kind-property.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -102px -0px; +} +.tsd-kind-property.tsd-parent-kind-class.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -0px; +} +.tsd-kind-property.tsd-parent-kind-enum > .tsd-kind-icon:before { + background-position: -170px -0px; +} +.tsd-kind-property.tsd-parent-kind-enum.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -187px -0px; +} +.tsd-kind-property.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -0px; +} +.tsd-kind-property.tsd-parent-kind-interface > .tsd-kind-icon:before { + background-position: -204px -0px; +} +.tsd-kind-property.tsd-parent-kind-interface.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -221px -0px; +} + +.tsd-kind-get-signature > .tsd-kind-icon:before { + background-position: -136px -17px; +} +.tsd-kind-get-signature.tsd-is-protected > .tsd-kind-icon:before { + background-position: -153px -17px; +} +.tsd-kind-get-signature.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -17px; +} +.tsd-kind-get-signature.tsd-parent-kind-class > .tsd-kind-icon:before { + background-position: -51px -17px; +} +.tsd-kind-get-signature.tsd-parent-kind-class.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -68px -17px; +} +.tsd-kind-get-signature.tsd-parent-kind-class.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -85px -17px; +} +.tsd-kind-get-signature.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -102px -17px; +} +.tsd-kind-get-signature.tsd-parent-kind-class.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -17px; +} +.tsd-kind-get-signature.tsd-parent-kind-enum > .tsd-kind-icon:before { + background-position: -170px -17px; +} +.tsd-kind-get-signature.tsd-parent-kind-enum.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -187px -17px; +} +.tsd-kind-get-signature.tsd-parent-kind-enum.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -17px; +} +.tsd-kind-get-signature.tsd-parent-kind-interface > .tsd-kind-icon:before { + background-position: -204px -17px; +} +.tsd-kind-get-signature.tsd-parent-kind-interface.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -221px -17px; +} + +.tsd-kind-set-signature > .tsd-kind-icon:before { + background-position: -136px -34px; +} +.tsd-kind-set-signature.tsd-is-protected > .tsd-kind-icon:before { + background-position: -153px -34px; +} +.tsd-kind-set-signature.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -34px; +} 
+.tsd-kind-set-signature.tsd-parent-kind-class > .tsd-kind-icon:before { + background-position: -51px -34px; +} +.tsd-kind-set-signature.tsd-parent-kind-class.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -68px -34px; +} +.tsd-kind-set-signature.tsd-parent-kind-class.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -85px -34px; +} +.tsd-kind-set-signature.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -102px -34px; +} +.tsd-kind-set-signature.tsd-parent-kind-class.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -34px; +} +.tsd-kind-set-signature.tsd-parent-kind-enum > .tsd-kind-icon:before { + background-position: -170px -34px; +} +.tsd-kind-set-signature.tsd-parent-kind-enum.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -187px -34px; +} +.tsd-kind-set-signature.tsd-parent-kind-enum.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -34px; +} +.tsd-kind-set-signature.tsd-parent-kind-interface > .tsd-kind-icon:before { + background-position: -204px -34px; +} +.tsd-kind-set-signature.tsd-parent-kind-interface.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -221px -34px; +} + +.tsd-kind-accessor > .tsd-kind-icon:before { + background-position: -136px -51px; +} +.tsd-kind-accessor.tsd-is-protected > .tsd-kind-icon:before { + background-position: -153px -51px; +} +.tsd-kind-accessor.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -51px; +} +.tsd-kind-accessor.tsd-parent-kind-class > .tsd-kind-icon:before { + background-position: -51px -51px; +} +.tsd-kind-accessor.tsd-parent-kind-class.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -68px -51px; +} +.tsd-kind-accessor.tsd-parent-kind-class.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -85px -51px; +} +.tsd-kind-accessor.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -102px -51px; +} +.tsd-kind-accessor.tsd-parent-kind-class.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -51px; +} +.tsd-kind-accessor.tsd-parent-kind-enum > .tsd-kind-icon:before { + background-position: -170px -51px; +} +.tsd-kind-accessor.tsd-parent-kind-enum.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -187px -51px; +} +.tsd-kind-accessor.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -51px; +} +.tsd-kind-accessor.tsd-parent-kind-interface > .tsd-kind-icon:before { + background-position: -204px -51px; +} +.tsd-kind-accessor.tsd-parent-kind-interface.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -221px -51px; +} + +.tsd-kind-function > .tsd-kind-icon:before { + background-position: -136px -68px; +} +.tsd-kind-function.tsd-is-protected > .tsd-kind-icon:before { + background-position: -153px -68px; +} +.tsd-kind-function.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -68px; +} +.tsd-kind-function.tsd-parent-kind-class > .tsd-kind-icon:before { + background-position: -51px -68px; +} +.tsd-kind-function.tsd-parent-kind-class.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -68px -68px; +} +.tsd-kind-function.tsd-parent-kind-class.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -85px -68px; +} +.tsd-kind-function.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited + > .tsd-kind-icon:before { + 
background-position: -102px -68px; +} +.tsd-kind-function.tsd-parent-kind-class.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -68px; +} +.tsd-kind-function.tsd-parent-kind-enum > .tsd-kind-icon:before { + background-position: -170px -68px; +} +.tsd-kind-function.tsd-parent-kind-enum.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -187px -68px; +} +.tsd-kind-function.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -68px; +} +.tsd-kind-function.tsd-parent-kind-interface > .tsd-kind-icon:before { + background-position: -204px -68px; +} +.tsd-kind-function.tsd-parent-kind-interface.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -221px -68px; +} + +.tsd-kind-method > .tsd-kind-icon:before { + background-position: -136px -68px; +} +.tsd-kind-method.tsd-is-protected > .tsd-kind-icon:before { + background-position: -153px -68px; +} +.tsd-kind-method.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -68px; +} +.tsd-kind-method.tsd-parent-kind-class > .tsd-kind-icon:before { + background-position: -51px -68px; +} +.tsd-kind-method.tsd-parent-kind-class.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -68px -68px; +} +.tsd-kind-method.tsd-parent-kind-class.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -85px -68px; +} +.tsd-kind-method.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -102px -68px; +} +.tsd-kind-method.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -68px; +} +.tsd-kind-method.tsd-parent-kind-enum > .tsd-kind-icon:before { + background-position: -170px -68px; +} +.tsd-kind-method.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { + background-position: -187px -68px; +} +.tsd-kind-method.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -68px; +} +.tsd-kind-method.tsd-parent-kind-interface > .tsd-kind-icon:before { + background-position: -204px -68px; +} +.tsd-kind-method.tsd-parent-kind-interface.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -221px -68px; +} + +.tsd-kind-call-signature > .tsd-kind-icon:before { + background-position: -136px -68px; +} +.tsd-kind-call-signature.tsd-is-protected > .tsd-kind-icon:before { + background-position: -153px -68px; +} +.tsd-kind-call-signature.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -68px; +} +.tsd-kind-call-signature.tsd-parent-kind-class > .tsd-kind-icon:before { + background-position: -51px -68px; +} +.tsd-kind-call-signature.tsd-parent-kind-class.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -68px -68px; +} +.tsd-kind-call-signature.tsd-parent-kind-class.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -85px -68px; +} +.tsd-kind-call-signature.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -102px -68px; +} +.tsd-kind-call-signature.tsd-parent-kind-class.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -68px; +} +.tsd-kind-call-signature.tsd-parent-kind-enum > .tsd-kind-icon:before { + background-position: -170px -68px; +} +.tsd-kind-call-signature.tsd-parent-kind-enum.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -187px -68px; +} +.tsd-kind-call-signature.tsd-parent-kind-enum.tsd-is-private + > .tsd-kind-icon:before { + 
background-position: -119px -68px; +} +.tsd-kind-call-signature.tsd-parent-kind-interface > .tsd-kind-icon:before { + background-position: -204px -68px; +} +.tsd-kind-call-signature.tsd-parent-kind-interface.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -221px -68px; +} + +.tsd-kind-function.tsd-has-type-parameter > .tsd-kind-icon:before { + background-position: -136px -85px; +} +.tsd-kind-function.tsd-has-type-parameter.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -153px -85px; +} +.tsd-kind-function.tsd-has-type-parameter.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -85px; +} +.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-class + > .tsd-kind-icon:before { + background-position: -51px -85px; +} +.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-class.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -68px -85px; +} +.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-class.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -85px -85px; +} +.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -102px -85px; +} +.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-class.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -85px; +} +.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-enum + > .tsd-kind-icon:before { + background-position: -170px -85px; +} +.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-enum.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -187px -85px; +} +.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-enum.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -85px; +} +.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-interface + > .tsd-kind-icon:before { + background-position: -204px -85px; +} +.tsd-kind-function.tsd-has-type-parameter.tsd-parent-kind-interface.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -221px -85px; +} + +.tsd-kind-method.tsd-has-type-parameter > .tsd-kind-icon:before { + background-position: -136px -85px; +} +.tsd-kind-method.tsd-has-type-parameter.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -153px -85px; +} +.tsd-kind-method.tsd-has-type-parameter.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -85px; +} +.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-class + > .tsd-kind-icon:before { + background-position: -51px -85px; +} +.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-class.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -68px -85px; +} +.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-class.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -85px -85px; +} +.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -102px -85px; +} +.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-class.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -85px; +} +.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-enum + > .tsd-kind-icon:before { + background-position: -170px -85px; +} +.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-enum.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -187px -85px; +} 
+.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-enum.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -85px; +} +.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-interface + > .tsd-kind-icon:before { + background-position: -204px -85px; +} +.tsd-kind-method.tsd-has-type-parameter.tsd-parent-kind-interface.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -221px -85px; +} + +.tsd-kind-constructor > .tsd-kind-icon:before { + background-position: -136px -102px; +} +.tsd-kind-constructor.tsd-is-protected > .tsd-kind-icon:before { + background-position: -153px -102px; +} +.tsd-kind-constructor.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -102px; +} +.tsd-kind-constructor.tsd-parent-kind-class > .tsd-kind-icon:before { + background-position: -51px -102px; +} +.tsd-kind-constructor.tsd-parent-kind-class.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -68px -102px; +} +.tsd-kind-constructor.tsd-parent-kind-class.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -85px -102px; +} +.tsd-kind-constructor.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -102px -102px; +} +.tsd-kind-constructor.tsd-parent-kind-class.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -102px; +} +.tsd-kind-constructor.tsd-parent-kind-enum > .tsd-kind-icon:before { + background-position: -170px -102px; +} +.tsd-kind-constructor.tsd-parent-kind-enum.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -187px -102px; +} +.tsd-kind-constructor.tsd-parent-kind-enum.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -102px; +} +.tsd-kind-constructor.tsd-parent-kind-interface > .tsd-kind-icon:before { + background-position: -204px -102px; +} +.tsd-kind-constructor.tsd-parent-kind-interface.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -221px -102px; +} + +.tsd-kind-constructor-signature > .tsd-kind-icon:before { + background-position: -136px -102px; +} +.tsd-kind-constructor-signature.tsd-is-protected > .tsd-kind-icon:before { + background-position: -153px -102px; +} +.tsd-kind-constructor-signature.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -102px; +} +.tsd-kind-constructor-signature.tsd-parent-kind-class > .tsd-kind-icon:before { + background-position: -51px -102px; +} +.tsd-kind-constructor-signature.tsd-parent-kind-class.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -68px -102px; +} +.tsd-kind-constructor-signature.tsd-parent-kind-class.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -85px -102px; +} +.tsd-kind-constructor-signature.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -102px -102px; +} +.tsd-kind-constructor-signature.tsd-parent-kind-class.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -102px; +} +.tsd-kind-constructor-signature.tsd-parent-kind-enum > .tsd-kind-icon:before { + background-position: -170px -102px; +} +.tsd-kind-constructor-signature.tsd-parent-kind-enum.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -187px -102px; +} +.tsd-kind-constructor-signature.tsd-parent-kind-enum.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -102px; +} +.tsd-kind-constructor-signature.tsd-parent-kind-interface + > .tsd-kind-icon:before { + background-position: -204px 
-102px; +} +.tsd-kind-constructor-signature.tsd-parent-kind-interface.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -221px -102px; +} + +.tsd-kind-index-signature > .tsd-kind-icon:before { + background-position: -136px -119px; +} +.tsd-kind-index-signature.tsd-is-protected > .tsd-kind-icon:before { + background-position: -153px -119px; +} +.tsd-kind-index-signature.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -119px; +} +.tsd-kind-index-signature.tsd-parent-kind-class > .tsd-kind-icon:before { + background-position: -51px -119px; +} +.tsd-kind-index-signature.tsd-parent-kind-class.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -68px -119px; +} +.tsd-kind-index-signature.tsd-parent-kind-class.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -85px -119px; +} +.tsd-kind-index-signature.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -102px -119px; +} +.tsd-kind-index-signature.tsd-parent-kind-class.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -119px; +} +.tsd-kind-index-signature.tsd-parent-kind-enum > .tsd-kind-icon:before { + background-position: -170px -119px; +} +.tsd-kind-index-signature.tsd-parent-kind-enum.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -187px -119px; +} +.tsd-kind-index-signature.tsd-parent-kind-enum.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -119px; +} +.tsd-kind-index-signature.tsd-parent-kind-interface > .tsd-kind-icon:before { + background-position: -204px -119px; +} +.tsd-kind-index-signature.tsd-parent-kind-interface.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -221px -119px; +} + +.tsd-kind-event > .tsd-kind-icon:before { + background-position: -136px -136px; +} +.tsd-kind-event.tsd-is-protected > .tsd-kind-icon:before { + background-position: -153px -136px; +} +.tsd-kind-event.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -136px; +} +.tsd-kind-event.tsd-parent-kind-class > .tsd-kind-icon:before { + background-position: -51px -136px; +} +.tsd-kind-event.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { + background-position: -68px -136px; +} +.tsd-kind-event.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { + background-position: -85px -136px; +} +.tsd-kind-event.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -102px -136px; +} +.tsd-kind-event.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -136px; +} +.tsd-kind-event.tsd-parent-kind-enum > .tsd-kind-icon:before { + background-position: -170px -136px; +} +.tsd-kind-event.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { + background-position: -187px -136px; +} +.tsd-kind-event.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -136px; +} +.tsd-kind-event.tsd-parent-kind-interface > .tsd-kind-icon:before { + background-position: -204px -136px; +} +.tsd-kind-event.tsd-parent-kind-interface.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -221px -136px; +} + +.tsd-is-static > .tsd-kind-icon:before { + background-position: -136px -153px; +} +.tsd-is-static.tsd-is-protected > .tsd-kind-icon:before { + background-position: -153px -153px; +} +.tsd-is-static.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -153px; +} 
+.tsd-is-static.tsd-parent-kind-class > .tsd-kind-icon:before { + background-position: -51px -153px; +} +.tsd-is-static.tsd-parent-kind-class.tsd-is-inherited > .tsd-kind-icon:before { + background-position: -68px -153px; +} +.tsd-is-static.tsd-parent-kind-class.tsd-is-protected > .tsd-kind-icon:before { + background-position: -85px -153px; +} +.tsd-is-static.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -102px -153px; +} +.tsd-is-static.tsd-parent-kind-class.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -153px; +} +.tsd-is-static.tsd-parent-kind-enum > .tsd-kind-icon:before { + background-position: -170px -153px; +} +.tsd-is-static.tsd-parent-kind-enum.tsd-is-protected > .tsd-kind-icon:before { + background-position: -187px -153px; +} +.tsd-is-static.tsd-parent-kind-enum.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -153px; +} +.tsd-is-static.tsd-parent-kind-interface > .tsd-kind-icon:before { + background-position: -204px -153px; +} +.tsd-is-static.tsd-parent-kind-interface.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -221px -153px; +} + +.tsd-is-static.tsd-kind-function > .tsd-kind-icon:before { + background-position: -136px -170px; +} +.tsd-is-static.tsd-kind-function.tsd-is-protected > .tsd-kind-icon:before { + background-position: -153px -170px; +} +.tsd-is-static.tsd-kind-function.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -170px; +} +.tsd-is-static.tsd-kind-function.tsd-parent-kind-class > .tsd-kind-icon:before { + background-position: -51px -170px; +} +.tsd-is-static.tsd-kind-function.tsd-parent-kind-class.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -68px -170px; +} +.tsd-is-static.tsd-kind-function.tsd-parent-kind-class.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -85px -170px; +} +.tsd-is-static.tsd-kind-function.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -102px -170px; +} +.tsd-is-static.tsd-kind-function.tsd-parent-kind-class.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -170px; +} +.tsd-is-static.tsd-kind-function.tsd-parent-kind-enum > .tsd-kind-icon:before { + background-position: -170px -170px; +} +.tsd-is-static.tsd-kind-function.tsd-parent-kind-enum.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -187px -170px; +} +.tsd-is-static.tsd-kind-function.tsd-parent-kind-enum.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -170px; +} +.tsd-is-static.tsd-kind-function.tsd-parent-kind-interface + > .tsd-kind-icon:before { + background-position: -204px -170px; +} +.tsd-is-static.tsd-kind-function.tsd-parent-kind-interface.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -221px -170px; +} + +.tsd-is-static.tsd-kind-method > .tsd-kind-icon:before { + background-position: -136px -170px; +} +.tsd-is-static.tsd-kind-method.tsd-is-protected > .tsd-kind-icon:before { + background-position: -153px -170px; +} +.tsd-is-static.tsd-kind-method.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -170px; +} +.tsd-is-static.tsd-kind-method.tsd-parent-kind-class > .tsd-kind-icon:before { + background-position: -51px -170px; +} +.tsd-is-static.tsd-kind-method.tsd-parent-kind-class.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -68px -170px; +} 
+.tsd-is-static.tsd-kind-method.tsd-parent-kind-class.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -85px -170px; +} +.tsd-is-static.tsd-kind-method.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -102px -170px; +} +.tsd-is-static.tsd-kind-method.tsd-parent-kind-class.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -170px; +} +.tsd-is-static.tsd-kind-method.tsd-parent-kind-enum > .tsd-kind-icon:before { + background-position: -170px -170px; +} +.tsd-is-static.tsd-kind-method.tsd-parent-kind-enum.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -187px -170px; +} +.tsd-is-static.tsd-kind-method.tsd-parent-kind-enum.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -170px; +} +.tsd-is-static.tsd-kind-method.tsd-parent-kind-interface + > .tsd-kind-icon:before { + background-position: -204px -170px; +} +.tsd-is-static.tsd-kind-method.tsd-parent-kind-interface.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -221px -170px; +} + +.tsd-is-static.tsd-kind-call-signature > .tsd-kind-icon:before { + background-position: -136px -170px; +} +.tsd-is-static.tsd-kind-call-signature.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -153px -170px; +} +.tsd-is-static.tsd-kind-call-signature.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -170px; +} +.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-class + > .tsd-kind-icon:before { + background-position: -51px -170px; +} +.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-class.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -68px -170px; +} +.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-class.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -85px -170px; +} +.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -102px -170px; +} +.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-class.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -170px; +} +.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-enum + > .tsd-kind-icon:before { + background-position: -170px -170px; +} +.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-enum.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -187px -170px; +} +.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-enum.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -170px; +} +.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-interface + > .tsd-kind-icon:before { + background-position: -204px -170px; +} +.tsd-is-static.tsd-kind-call-signature.tsd-parent-kind-interface.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -221px -170px; +} + +.tsd-is-static.tsd-kind-event > .tsd-kind-icon:before { + background-position: -136px -187px; +} +.tsd-is-static.tsd-kind-event.tsd-is-protected > .tsd-kind-icon:before { + background-position: -153px -187px; +} +.tsd-is-static.tsd-kind-event.tsd-is-private > .tsd-kind-icon:before { + background-position: -119px -187px; +} +.tsd-is-static.tsd-kind-event.tsd-parent-kind-class > .tsd-kind-icon:before { + background-position: -51px -187px; +} +.tsd-is-static.tsd-kind-event.tsd-parent-kind-class.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -68px -187px; +} 
+.tsd-is-static.tsd-kind-event.tsd-parent-kind-class.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -85px -187px; +} +.tsd-is-static.tsd-kind-event.tsd-parent-kind-class.tsd-is-protected.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -102px -187px; +} +.tsd-is-static.tsd-kind-event.tsd-parent-kind-class.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -187px; +} +.tsd-is-static.tsd-kind-event.tsd-parent-kind-enum > .tsd-kind-icon:before { + background-position: -170px -187px; +} +.tsd-is-static.tsd-kind-event.tsd-parent-kind-enum.tsd-is-protected + > .tsd-kind-icon:before { + background-position: -187px -187px; +} +.tsd-is-static.tsd-kind-event.tsd-parent-kind-enum.tsd-is-private + > .tsd-kind-icon:before { + background-position: -119px -187px; +} +.tsd-is-static.tsd-kind-event.tsd-parent-kind-interface + > .tsd-kind-icon:before { + background-position: -204px -187px; +} +.tsd-is-static.tsd-kind-event.tsd-parent-kind-interface.tsd-is-inherited + > .tsd-kind-icon:before { + background-position: -221px -187px; +} diff --git a/docs/assets/icons.png b/docs/assets/icons.png new file mode 100644 index 0000000000000000000000000000000000000000..3836d5fe46e48bbe186116855aae879c23935327 GIT binary patch literal 9615 zcmZ{Kc_36>+`rwViHMAd#!?~-${LfgP1$7)F~(N1WKRsT#$-?;yNq3ylq}iztr1xY z8DtsBI<`UHtDfii{r-60Kg@OSJ?GqW=bZ2NvwY{NzOLpergKbGR8*&KBGn9m;|lQC z2Vwv|y`nSufCHVQijE2uRauuTeKZL;=kiiF^SbTk;N^?*u%}Y7bF;O-aMK0lXm4nb zvU~Kf+x|Kgl@Ro%nu?L%x8-yetd((kCqY|t;-%}@Y3Ez_m(HTRt=ekeUQ2n4-aRvJ zrlKaWct8JSc8Kxl4KHu+3VW1L`9%n~_KC5}g6&tFXqyKT-}R0?EdkYqCmQot47^9Z z6;opqR@7Nq-s|6=e6*0^`}+X1kg>CpuGnbpL7{xFTa|8nymC0{xgx*tI7n4mTKZNA znsd@3eVsV>YhATuv~+5(^Vu4j?)Tn`{x@8ijIA;wdf`+0P3$vnSrcWFXXc{Lx`1Z7 z%-n(BM(owD$7LzqJx)(f^Cusecq>OW z=h6n4YzSVM-V!-DK(sLT`!W~}($=O$9|ie`>_fpH0=1G1tiIFw($?~{5T>`74|p0H z``5=UydE)!CiFvmECW|s^TzG9*7pN|KknkVm3C{fEu30gffX&8iCm? 
literal 0
HcmV?d00001

diff --git a/docs/assets/icons@2x.png b/docs/assets/icons@2x.png
new file mode 100644
index 0000000000000000000000000000000000000000..5a209e2f6d7f915cc9cb6fe7a4264c8be4db87b0
GIT binary patch
literal 28144

literal 0
HcmV?d00001

diff --git a/docs/assets/images/icons@2x.png b/docs/assets/images/icons@2x.png
deleted file mode 100644
index 8932ba20ffa431194b8cebc977c731bec3ee23e0..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
literal 27740
zfh#(%EaGd9goN*X$B0gGxK}IKxj%$JxNGN{6kO?2H0cAh;;rMt%(WE+vQ5bY$nTI1 z7h4YhT}|zIy2eZWgk=w}B8#`E@NVWtAK9;h>aEWjn(4vCL5eM1+loY9Z&w9m5yehx z&vL-V$9EZu&ZbMWVjKsrg$0YFTW^@{dWRnk9_Nwd9w{jPyHCTbQ+L;p++c5@%SINO z!X6KL#+<|3qiR2=#hgR|faaiSRu0GK=;#LTGcN>rCr9p_cXXTAFOhi2tr7kY8qo~J z@BaM*F<_#)uuB4Wvm!lBHfQyza}s)h=swNQ0&Q+$fVFT}3i%XZjXnXSzuPAoee)jvI=%m0l>G`nkbf?2=^pxtzqF1(Uz+yKY(gYkx}+)cs9 zvg8g|WntQypSd`+V!?~#OOR(?Ar-w`2<0XxdDyZ+s5`;2-|R`!(^=Q(4*1z~X2?`kcdYgFe}uue7nUpqrqdm&ZN=%x z)7v*|DMhuEzzx~QxP3oza>5Hl6M`URXFn$sD1Z;KD#aFmc$47A2b zQ@D*U{;PC+wH3Li5HBq{e9gk`Vydu|H7`qEcamEsL7F7>^K%<(@QPh*ljroaOtL8^ z;{A>G!fI_8b+0bfqGyql_31pd`04R>25~!SsQTt4W22SG#a_LjHr9 zG*a;ItnFm=Ci3p*B zH_J3BS#+JV!LQtIcEPX#BY@SxtNsNEp-MgXk6dZ&8h!NwFJ#^a7zGpUr$2^aZ4v)^ zmceyQSsQ5_r#n06obXW|ID+e;J9}d;m5R^@Zyxt2X1@Z)d@|Z;dr|rJ$d#LlNR%?yY%idz6|y$8w>yIO8~xkN2bDV!q}XqQ|~-6yLqoWC-_&1 z`~T5qO3H3lzrmEa{S&#hIO@`P&OEDGmq3fknJE7z9LzpyM%2Rlb{)@kKp9!&)`iOu z#8B63+p$K88rk@oaK+-Fy^`Vq<~w|Lm_Uc&k}x8pU?M6YP~ESjPokVry<<5!;7YJp zv6SS-V-+RD&muSRXJ07|T;U_(FN(e*f0w#0HK)B&6?`b6FF=x{=s>}Sq;FxH`miFz zgH5=#-9?SDBA~aN4Cn`%4vo1Fva3@pH~eZRf95dfx$9UENaj&Kx!AjO&(uF`9cj%4 z9k(!lH|YOG0$VB{Dwl0dC;WbdQ(@yMC^Td8&dsmce7y*Qx93H8Robn1zw43@FPP`3 z+3f4xrQCAAC@Zu}kvw`qjn1|VZyK9s*rUJNifEf@DnqS48h#hGGLgzY#gb;dM_sYw zGGT@yLv`GI5Kb!)0-1<06$>fn1zvx&?D75D*uDaH0<(xtLSUpQs1ArvM(H8%G0R;55R z9-_CZ^X8%Ko?o?h1(Qc=pn~cr*yzLv#2k+Lc1crzdwFs z8vh@G*IOZE*T5$5(U6tFmf~`<8f(~-S!4(#WuYz8s`LKp*+@>ce-6cFj)zr6+*UA; zlsPcCQD8x=jb(4nb9_;(-3BxsprtcMGU%2gHJ*rU4qEig&*Fh}JOc(tz3u(lp*#W|fiOX;1H_KZ}xD(~MInrb)A{hKX5kj(!Q zEHAz2XcIYQX1H3WydK24fHgQ)a`V8a*jnO9_yPBU|pKW1IN!_owCm#LELx zwcCu#RKASr1>?$lk?!0&f?3=NE>I zxo%Py*#ZSClS-`d!I;>?m&_ojPGm_TixV)q37vOUyJ8}nLT~aweOdW?P^f>|@Xq%D z6)$Q&M#tBGRwBw}kicw~!P2Z|riT6dn59I;6a%~qW%qhgc_kUgFD}Divs>mznlR}1 zi{!f5(5&&JBH3#)nUoF4UxoIl@PMf+mACFXVA4tEpo2-+FRwbRG)ld!NT1_M_KoX3 z${pO2EIpgN*U}sf+YuBAid=2wc*%Bhdg(W*K9x-Xj+lwgD$w6JKB#!&L9MDzhIcE zE%8=#dO*dXZN&;I_mh8#Jn%0nqU8PaSN?g!cKeCkl}ebZJ4~f#4q7+(61@AVu$}tB zr2?J3_TH;@aQVl2p*`~S=?44DI`Zr_2shE1QP@SmF7#0ZCKRJ<=yrz0945*wX5#(c zSj`&#ezPUNyV8}RbkCO>p$Rolvk#dz-4xDhtwpv~Ix;g*F^F`Z?^ACFv(U8zAvPTU z=3gU!Z2cil9&M!j9|dbt>Q6g#O6$%KuS!CiHTrfL@Qwp%!KRd37%k>+q7`od6=MF2 ztTQ^3?psq0jO_@5UKg5y{h0K4FF8`_peZi!((IT#Aw6} zezV6zd*Q}T)Hhd5P#x^E%T!%`u>0(79!?r13Co@u*Xwq)HM3>ghs}iDG=?{V0zks~ zDdgIhiv+F9V8*BYGaQnTI<}qazPvPx-pQgi%cBgYI;xY|^0BHl<%u>nQ-OSKR`<)3TDgi(X zVd0U5`V^bNN)v(k^umBHHp~1_dEr~bHbW=RHqis4zK2=z4aQ2YTWl?Rw}R|Tlkc(e z0imT)utJj*G^yi7&=bwg;H@Pp#88>r$OAw$ynJDWLhqJBX!#?6*SZqRV`4$Mm4$4I zA|9Z>zc%Fq!U`9W8!&UsUR;#(0wkK9+nNXLSeoB02oC>#kom>VXu#76{sg`kc{O3_ zILbkv)Zf1#h9%1516t{bg3Y5*x|*|f$AUvk7btk;c@rtnBO?jb!IkRr1|;kuqBJhq zws^ZO*J4_m%3#$X1fsSy%F!(NA|#+0?^?CuKUiJYj4a_!(6j$Xqbp2hJs2rNT>Z}# z{TlHxoP8X|q0AM8gRj(ZQI?JqU9^aY8>;X!&RJ4}vU8vL2+k_#yADX>6c=Ede|?NC z{Gw7GknrV{C4wg0+~>fY9Ei1!Nx$xynG1_sYF0!p4DD=feEAh3-AoT?V?=26&Ox?>=)PjPGKr-WP(2VlKIRn=D1JkG(rr=f&D@2z>Sggn&@^26Ld z{wH%QL7*QBWv?s%OLBt($db&Mfc5GL7RVS^uC;|1S$UcmacjO1oh^eh`| z9WP9jbKqGht7$3>ymU@GiJ)L4w(MStoP@S3O z15xIKlE4@e z&=9C`;H`FeQfsD_?B_HoZEE;mWDX+kZ@f<1ggttif=z}b+Rsk07XEDQR{(TRMO>71 zTg9X2FQmN>dIWSpH`ic^7&LD-;|g_63r0=OH?|Y1a@2w|H#au8u1_a*`R=Z$e`d;M zS*D4PiRJ&11BFii&pA+|U@nhu{K)+erd8Zn>&mp?A+5@s=&fh#R5deS=1W2kT;fHR zdy5xH8>s-TJ0EuLW=7}P0GX>R35=DyY(+=EZT(l#`(pD_^p^Cq@o0ZAN@^!TR$+Qt zaIC`1(2P{vQof?Ik)Ym9PUpmYnf8I=r~tVT7S*>h;7XPjD!>HY3~Qt!P;nU?KNRL| zB8_)SSvm7SqiDz>D??F$GIBL2OZtb{}+pw3>vh?o`jusH9Sb$$vyE1_zs71mJrA)&x=!xI$lZl?Pi+FzNB0na6(`^ z57Sh&I8vDjoNDpOIng?JAHW~&i z&3-v!tuH^Ev6y0d>Bx$5`W(-9Z~lRQ3yy9)iW3`~aOIW@{}UTwzP60OgI zVe2We-cD)%7*Vi~={|o+b!z@Eu-mt5J0e{Fg&ZD!app%YQ*+h1PDa9N=OFw_W>(qD 
zB37L2E!$4jC9jZ2z>%W!n2wZBa?xgpvDQ9-`24+Lyo>q*#hlt?!E6WaSPqT9bhP_*wcI1KSR3|2<@UNWgxE|3Wk#Ip@_8!iGY^HO+t8$uFlF~a$02g2~ytA}@fI=3W zR=xH+W#ByH3;Vc43DV-#(zlf{g5;4wIZaxn_&?P-5NZ7u^a8qqn)7#bIN|3)>(ZJh&fjQM<=*`JvyxO40j3{c&GBEwG$wV zosT-{g9xam5~A^Rxi1pqj!IJM8~V2I*{m=O$da;7sxSSm4LN3#&qPDwElG!6samAA zV?`HFHgtFoYzgWDMQ33HX?fhYwi&)>woYQC#p=gZ0B$ZXMb!M^GmV6 z&yvf>tM=b4`Nr#0+b%N+Ob1^zpAaUj@3u-HB)07$Eyc16qhSK{aBRQ8;=zY6q65OQ z3cKiy#Qs_kgWrWNuNm-PseZ86@jQ1CUq;TGrPoN7o=|ufN)i6*V+*}`QvF`X66PtB zD`$I;T+N4>6*1jLI$XP}#&`{I8c%Fj_-y_+M;6<}eUcUy&O+uv=Oq6jFhv1>;Rtt? zJ3nrxMEQ&S-A{flh=qKwS9t4qdjrV9#6)~Gw z38~25Tdi@BpcrF~5cJLZ{t!fr%ZwYw3_}!TYYNmb`EAVNhJD}eQ_IH#G$(^Ep@F!$ zcnB3U#2g4?kX~!fN#9#vXJB~dLR-`%@fp(w3|H!tjYM5T)c)AK+2F}|5+SEJ6ZcU? zt};m?*z5|rNYaHr`}%B-F7%Cqo|5Pn3{kuB57L{IF&P1QwI>}p6%;U1Sp8NaIJaQF zx|XIsrCtxZ3APxbk3tV|@6(e2e`w-%TG~~KGo??bu`~k`jm1SX^SbaP6W=n}xK@8^ zXiesz*|kf8Hsh(h)}+#EmnZ_X2{Jh0QgXHs|H>#1+xC=r9AwTlI{#vTbPwwlFGqdC zQi5f=16ao#%EE}d)9_*buOBpR_mieg2;6_>Ahbj2frMMk0Al&>bX*3lU#!`o{nM(JO9ZRSf8D_Yy!yQDNeGXaPTfriNe zR>~jVb_xM-NxU6G0!In5?sj`G?y=fV(Utg0#r}^LdGP|n)(^M;i$#u#c|UKu$2<|N zCLy)o?a8#mS~y-j?h?WW4~U~I2~OcPMg4~BU{%x)xK_>h0oRnRAHjF?iP}@U$?vv; zz207U|4AVvoECW4*0vmNk#aR-wNW52%~Xw7$~kPSR~T2nc#5+bzSa7fqfls$S*ARX zPnn;KY}kIAmw4Nc$qn8swNoFhT`LHdEdAtE5f?6+*ogscu1qOyp`W15$r@8iHvX&G zDt*bz>($Qh5bVdn)cKQzzxbY~bg@_6e+ZjX^RQw=@{FZThfEV_vcMH0S+yr*>O#0^ z4N&G;fr?TxcXVbeoHRazfSfaGSs{Ey8%I^zf`if_T^U`Y3R&Mw)&-VuHqo`~=R$N^ z=8}vc!N#fj=97W?NT)}Ldl`2d#5ro|F0cCf#)=MnA4!2jW+x`Wsjt=-(=$S99+M%y zp<|OuKK&O1oz1l6L(@HRV9Re}5Q>gl1jm|3xxeZyb^%KHW|vfl=iyew98eMuZ_}Gk z<>2b}nkWSabZjqzdL_!AxOR-l2cZiS+i&A4W*e`t*&lzJf4YCYmvfD#wc_wZQTXqs zp!5*T%JAHW?7<8@pQDz}z^@+epV3=zb6AZ?NOiczvs;R@nh^D8uw52VmyON9;mwctT%R(&!2{o^NV}+52KP zKo|@SoX{=L1CP(kpwkY@IgeI~pBkk)b{zn?zw2A>{2PzA)yqb0S6F-#{Sp6F#>9Ws z6X=>%Vq`K$C+H2U3-+#O;WCUrNeQW$&Zj7p2daf)=sG%UZ+)fszR6U;^uA*Z+s%DC zn7(9WnJprRYyFmH68f)L|EXuJ(!Wo*xEOC%L`7@T#WU9J(H5QqR!5<3578d8c(zWa z1Bq1d?#hBqPK;&nnu&AaiVF=(;R|}zg?mFaR$mWnC*NMJNydBnK$!)=;eJjHDzyud zQ|#KKOSN)-c1&XH9&u3R2Uq^*)W;8tZ778M`9hIXn0^>A(em}^gKq6+KyAPGqLE}* zS>@fFX^hq0)vDyKd|ALPMCI&!cVZ}t7Uy(Iv_&_|yik)@)&e`mvop0@!X}4xmJ-aV z6`>A&GMwD%+g%U>x#bAD!-aduMp(f)v--N~;H(|gaLQvFmacp>j$lE32ZnS$(x zDjoC&R)u{=boM=Las8Ilt@hzHLaa;-7ra}uX{_(Ivz)GnQAbt1Iym@!C1B0HY&o;u z2OizF`Q!ob)@;(PawJvpPCZICsqg$aogzuVRXA;PzU7B0pv-4F4tDQaf;xN4#$?%&_v2;#l))Wp(yp1W*)bugHYn8h+?~mXf z)~Hm=EJaj3gy9E(kE0Xmo$u8?8ivJFCH0}pr~N9qq#4Khe9_a|pWvNcjK37UoA|f= z7R;)}qz@Byxl9V{{ER%i_Z!rU+6+VapPZ!l^J7BKPxL>~v$}=_w&_9_BF37|xUFTv zZ@s;nf`5ByJu1yOgtwU8pGgLFb2vn{Hr**~;E$_^MJ9;$#!->r-OmldGw^PVRImMx zB#UVH&VaCgj0O(~A^*1Zm>UBaYGblCkk4Z(tHkw~+YBIY)Y2Dt%~G)O`H0m;-R4xB>Ek=dQf-bdo=(E! z<70o5uH}1@!at+aM~7jq1ZY=w?yS4vlzD9tAO2I$=AUn1uwcLSBQTxwdI9m{yr-rt zWP%emtBi3fB+O#&U9e=0E`h$M*j5ATsDpmA&{bP67k<((wWe%*6hG?0bo zBU@vBw6tB8eXN9Y{S>%(J8mo5d0=?@N3d?Lc@A0*?-pbFy0Z4Xw-*SnAZzU{$QZlxR*R17^x z=4gU$Z1xB8_Jt5#o@j-tm=kT>ZD78@_On<&!hPjel-fAuAEMMc+=r=44ovk&*@7@9 zOz0u=GksWDth}*Q8>_MSk5)PW2Mwp<0qy3v;oZ*p`7|!z`e|E|K>Fxg-lEUp4)TZm z%UPa!cP^$T!K$7mD;=Ya5`{Sdg41IJ$s5Di@Mj0`4^?@*5pl=_d^~XJ{#`GjtG16O z;ai#Rl5X!)xd-ly<-F|#kgp!x}|wxNx3PxGQViF^UTcMc(QgY%sf%&O1i&Zlaqj4h8erFP9@%{~PNjQ!JEu3+m7`jfR(9N= z?AVL!Mu{_LYD}}+xLMKe&A}J>=Z}_%NYnc;S0OB{B^J)gfdNe(98OG{x3egx)hVvz z>A6>eSBQ4v$1vW%fL5jP;KK^i%D=F&uqFwzv@1Jf?kD2=J(_CwwvApRZrw>L<(628 z7|Q5H7)&nimkp^3Rn#Uv1>mvB_xkj0z{&K?^GSaG`=1)}QW~E&er+v0LDZ9E2R;;M zQx9$HiU;^XUJv%o-^=(U{}GBA4mZoYgF(&z+@gnRpqs2n)OOVN(;LAZv;rH4h>w}! 
z#%{g|@Hsk_ELWFAFO4v8CII+a6phJ2F;6eL4=PJMzy0kM;dBE&l>Iwr%68|Fs#<#v znkNQKXsvGWl7_2Of7U<6w8;orXT57)W|lQI`wVI=q`&&nt5;6QrQc>QEE=J#nuwF3 zwM@+HNN`~bD5*O9yPI5MY*`=SzV@Kcwde_el)r1OGSCOt%4pW9cjn=+=T!iXeU*KM zoi*6$(td9{(Q7^+vg!FU|B)hNIw!yJuWpH+aOL=J--ao6odhQ@xm7&*t(6deEdy#v zZh<!hl=;&>GoVnX=>Zjb8&Run4!+iccvj%-Is=xQTNnyiK6r1w ziOj)LHnt0|7WG-Y7fA+4CtubiA1Oq|1R544_%M$N(FF$^l$=gp5ee@)<@lG>_RAzR zB})XJuB|KTqBseCS}a{9TlGLo2oPy5Rx`og@x&#Vv9g-#h3C72DlsaZ28l>ZK_H!5 zo=qg=GNqmWwwzyp6;g)R@vMXbYjGIDs zGq%XSj-8QxGPV!~lN6Ec8QUoP*q7|2M1I#0x$ocidpyta{5OYlu4~S@W}MgO^FH72 z*LZTcR!KnI96y|-Yye>DWS5Do<$ zZ1Tj5>WMAM4d_~{g4GPq3V|mKPHxIUP3h);?0P3AUH3Qsdjh)^C5PSsviZMQ0SYWV zsjrilkU!!U)_=xU<*}!fGVcD2OuFvE=MAfbsv%$A5lmD*PeL~I+}6p@vp#23`EnqY zjRUFpR!P|~zES#gM1Gh<4YB((t1K_&WcjPnq=?(7;zJc!frb71n#&a@@uk+l!o^8X zHM%1R2V0zl37F>E{HBoKK&sU-eR+N`B^6a=YfoV(O=5KLFQA`lb@`NBXv#2vdS_> zH~($czD@;JcKX=}9|)2~B%Ldg=DO>p^)oNmP2i5sD&yCxG8XA?rp*6}=iRwB>Tofr ze-ynYURhc3<0oFwG)5K+P|WF^JB)2zl8T76>P?0)@G)NAEoZZO^y4z%9z~? z-A^XHR@6Yrvmy<*5jcY6Jw~QO3>`r%zS?u=mG9vX$gxw|<{vWK>nD&QuvStW_U4~}TjLP?B>&FsG8E1}{Y@Ch?Rq||Ma)w=rh ztsOYJd9V6J@_moR=!leMQ2pjah=1%y=$if(L(Xu`*lng*PutB8A;iH07l(W%PQW7f+O>%`K2_b+a;uY3gJ4rUHds#$< zh$5F4ic49y&VeWY3E&!fN2@G^Dw1TNc=XN5hu* z7))hJaB*xr*5T4T>D`o0H&l>{Ov~auZxM*)>Nq4Z#02{*19&a)7Y3r|Y_(6gYy6;F z_DgDY(3L&oc4=s3yP%2^AKaG~)KdlJSs!1xLka?Z|Ab`n1ebrAHC8X?168c&x|`U{ zl9GAx`^h}$4B(nG=4&9x@T}*keHj1y&LIfGO?D*l(IinC{!R}aZ~aM`{_;COXHOn* z)Z&8sGJ!lDXa7$$i08W`8pP_(Dz}|imzB9h^^o?kYt-iFkBuBeun60Bhr=tWkQpy0 z;D$OyjIl0aw7f|^ljLp4YZwhVL;noyZ?b%x1>E&NARw%v?f(RN@mPMC64tR3%>mvJ zP#QP5t6ZIj`59z73p?j{x|w0);ry{BE`rLC&N`M-*rWH{4q{k@g%H1{pd<4W`^fNs z^F}*Q`llPN@YsSx*0q=`t@7RHXj;aY``vAlApBm{aW}yKUdDUQ zU2Z405B(3m?D|Vk&17PTbU5KOvL%|Q37$P$G$9gF0H%9=y-NOV=$sXdGPFoqqIp(@ z`d4(2s(g`50SL{dJzfn2k4cC76DUcD$JZ=j{OUjrdE2Q+YA~VnVklh7TDyFNUPian;W3_Of}crUObnZpDYcRTptAZuY>yfUJ0B zXNFsVRfvYU=Ujare`KJFG~oNlVv8O1mQP5@M!*1bsD$({RyEuh49(qo`yvpwQbgpt zGPeac{S+Z-u6^W5t5r(0l`_zZ6_3EoIE`jQmJo#3Qg16t+^Z3Qi{_zYV%1#?_rRruf zD=QnSnO7y&x-?b61oTiEAm9HBRFb>U1bH9Vaf0zPmv z`_Aj%G)r{6nYi%$p}m-GqUm3-t8>g(U+Whn+cJWmf5xmbYE`aS)LEsj+(JluT$tVr zsP2%b0xti~DYuft>YTi2i73p`HJVn55cc*HY>`;PgS6K{T|VP#1?WtTb-=wdBHJP zUwc(-=Z)IDKmzOCE~z7vCp-mYys6Xd?RSj*#y9gS@Va2JZ~`;3&tmK4dxtO>)U>4S z6<2a%1^&<>jod|I-@bjTf)YbakVIN6_ESyNtV%s$?KH~U zCbKPKu+XtUZR7>&Ahu^z$qdEi6Y2S%u&^2p&Y`1C3xwE<-9Vn^lyNm-;RH#B!D4Xg`FJ^8f+U=zNV2&%0(JWD%&(E0j2KMWlNZ z)%s+5uNrC`9lbAA?nvt@2-HKiO@GGVPo#fy;qqAL(2JxJHo5c=)l(?nn!bag0_kr6 z)ST^b&5yJ6~~c@GievP({0wGq_PCT{=fpeiy9ypy;pAlbpn z?RPd-bwMEB64zDwIbrN#x2XVcA9vQhz-ib+dlI&)1!$6~C(pU8+)^Oidd2d}(a2R( zt5+;YX7y!eP{f+K0!c9w?9~B~ywxL(H1vsTv29n)#BanK8?hG(gH$ctH59qK+|R^Y zGVQuPI!*eB%urZF6IQ#;$gP_ONw;pP8%cHE1p5%B07B^Y))NqU`6T_T7x3lzfQFYI z@S@{l16k_xI-wIcm9Lp9indta@7_q+LJW9bIsdwX@$!coevr^kX=3JmyW14k=WsFk zurBSt9{{YTe}agPjH2#y4&+->Xj-&w>}1dP&jZAfFpg)Y}xj z30VxTLcEx|ouwaHvDeK~w;@)rmDf+--T(O_mZ{FJ<2Erh1 zUN3aW3VqHRU7u00X?cK>Jy=_7U1;mygnC>nKaOk#0sC-bqEz-k8EocmVzgJp5cXc> z&&?9oFWQlvz4G8l_fao#Mo`iE42>ePh__cE!&{DFu+cIL4SXfyG3@;Fo?SHjN(Je7 zXk#CH(M}!(3?C=SP%rV&_lzb%kRU4>_~@L}S%#i&lqNx)KzN`^JEE=dDqT{rKTj2v zFqYi(UB;xg${0?T%b8084fh1Q&S_(a!+pL~K3f1%S0*Ef0qdD4M!GMa=&chDMa&+z zUh=Nx=Gbn1A~jdOU9(f!ngOW=vh!E(1NDMQ>qzfrbMI185H0BtmkN7&``F+*(ayRa zY}%Ceu(!UV@23&Q&X;q@^G=TiK`z*$P-0YQvpF@`F*U; z%EyB0(2)d8f0m79eRi)D|Dg8cgyC%!EoOf06H3Mzn(7tg96qF26W$p=tBpC?P6d;0 zpK^=_U4Zpp=uWlicl!2&^5+5ZC!pBWsG0WUON!w>D3@)YH1b9JzHEuKuS&ibk z)Hg@cn+&N&0riaxd!majaVok?mw9e{IHz;rFVgH4(M3K>sQgnrtEwR#K3ou`@966tnA&wVjm>9HsdhZ1; z&`a652n(V3+OHa4?k2{KxF31$;5TC!YBThY(8XuwTSFFSc$XC30hu*2E7s=$(;kM3 